1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
32 #include "hydrogen-infer-representation.h"
34 #if V8_TARGET_ARCH_IA32
35 #include "ia32/lithium-ia32.h"
36 #elif V8_TARGET_ARCH_X64
37 #include "x64/lithium-x64.h"
38 #elif V8_TARGET_ARCH_ARM
39 #include "arm/lithium-arm.h"
40 #elif V8_TARGET_ARCH_MIPS
41 #include "mips/lithium-mips.h"
43 #error Unsupported target architecture.
49 #define DEFINE_COMPILE(type) \
50 LInstruction* H##type::CompileToLithium(LChunkBuilder* builder) { \
51 return builder->Do##type(this); \
53 HYDROGEN_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE)
57 Isolate* HValue::isolate() const {
58 ASSERT(block() != NULL);
59 return block()->isolate();
63 void HValue::AssumeRepresentation(Representation r) {
64 if (CheckFlag(kFlexibleRepresentation)) {
65 ChangeRepresentation(r);
66 // The representation of the value is dictated by type feedback and
67 // will not be changed later.
68 ClearFlag(kFlexibleRepresentation);
73 void HValue::InferRepresentation(HInferRepresentationPhase* h_infer) {
74 ASSERT(CheckFlag(kFlexibleRepresentation));
75 Representation new_rep = RepresentationFromInputs();
76 UpdateRepresentation(new_rep, h_infer, "inputs");
77 new_rep = RepresentationFromUses();
78 UpdateRepresentation(new_rep, h_infer, "uses");
79 if (representation().IsSmi() && HasNonSmiUse()) {
81 Representation::Integer32(), h_infer, "use requirements");
86 Representation HValue::RepresentationFromUses() {
87 if (HasNoUses()) return Representation::None();
89 // Array of use counts for each representation.
90 int use_count[Representation::kNumRepresentations] = { 0 };
92 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
93 HValue* use = it.value();
94 Representation rep = use->observed_input_representation(it.index());
95 if (rep.IsNone()) continue;
96 if (FLAG_trace_representation) {
97 PrintF("#%d %s is used by #%d %s as %s%s\n",
98 id(), Mnemonic(), use->id(), use->Mnemonic(), rep.Mnemonic(),
99 (use->CheckFlag(kTruncatingToInt32) ? "-trunc" : ""));
101 use_count[rep.kind()] += 1;
103 if (IsPhi()) HPhi::cast(this)->AddIndirectUsesTo(&use_count[0]);
104 int tagged_count = use_count[Representation::kTagged];
105 int double_count = use_count[Representation::kDouble];
106 int int32_count = use_count[Representation::kInteger32];
107 int smi_count = use_count[Representation::kSmi];
109 if (tagged_count > 0) return Representation::Tagged();
110 if (double_count > 0) return Representation::Double();
111 if (int32_count > 0) return Representation::Integer32();
112 if (smi_count > 0) return Representation::Smi();
114 return Representation::None();
118 void HValue::UpdateRepresentation(Representation new_rep,
119 HInferRepresentationPhase* h_infer,
120 const char* reason) {
121 Representation r = representation();
122 if (new_rep.is_more_general_than(r)) {
123 if (CheckFlag(kCannotBeTagged) && new_rep.IsTagged()) return;
124 if (FLAG_trace_representation) {
125 PrintF("Changing #%d %s representation %s -> %s based on %s\n",
126 id(), Mnemonic(), r.Mnemonic(), new_rep.Mnemonic(), reason);
128 ChangeRepresentation(new_rep);
129 AddDependantsToWorklist(h_infer);
134 void HValue::AddDependantsToWorklist(HInferRepresentationPhase* h_infer) {
135 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
136 h_infer->AddToWorklist(it.value());
138 for (int i = 0; i < OperandCount(); ++i) {
139 h_infer->AddToWorklist(OperandAt(i));
144 static int32_t ConvertAndSetOverflow(Representation r,
148 if (result > Smi::kMaxValue) {
150 return Smi::kMaxValue;
152 if (result < Smi::kMinValue) {
154 return Smi::kMinValue;
157 if (result > kMaxInt) {
161 if (result < kMinInt) {
166 return static_cast<int32_t>(result);
170 static int32_t AddWithoutOverflow(Representation r,
174 int64_t result = static_cast<int64_t>(a) + static_cast<int64_t>(b);
175 return ConvertAndSetOverflow(r, result, overflow);
179 static int32_t SubWithoutOverflow(Representation r,
183 int64_t result = static_cast<int64_t>(a) - static_cast<int64_t>(b);
184 return ConvertAndSetOverflow(r, result, overflow);
188 static int32_t MulWithoutOverflow(const Representation& r,
192 int64_t result = static_cast<int64_t>(a) * static_cast<int64_t>(b);
193 return ConvertAndSetOverflow(r, result, overflow);
197 int32_t Range::Mask() const {
198 if (lower_ == upper_) return lower_;
201 while (res < upper_) {
202 res = (res << 1) | 1;
210 void Range::AddConstant(int32_t value) {
211 if (value == 0) return;
212 bool may_overflow = false; // Overflow is ignored here.
213 Representation r = Representation::Integer32();
214 lower_ = AddWithoutOverflow(r, lower_, value, &may_overflow);
215 upper_ = AddWithoutOverflow(r, upper_, value, &may_overflow);
222 void Range::Intersect(Range* other) {
223 upper_ = Min(upper_, other->upper_);
224 lower_ = Max(lower_, other->lower_);
225 bool b = CanBeMinusZero() && other->CanBeMinusZero();
226 set_can_be_minus_zero(b);
230 void Range::Union(Range* other) {
231 upper_ = Max(upper_, other->upper_);
232 lower_ = Min(lower_, other->lower_);
233 bool b = CanBeMinusZero() || other->CanBeMinusZero();
234 set_can_be_minus_zero(b);
238 void Range::CombinedMax(Range* other) {
239 upper_ = Max(upper_, other->upper_);
240 lower_ = Max(lower_, other->lower_);
241 set_can_be_minus_zero(CanBeMinusZero() || other->CanBeMinusZero());
245 void Range::CombinedMin(Range* other) {
246 upper_ = Min(upper_, other->upper_);
247 lower_ = Min(lower_, other->lower_);
248 set_can_be_minus_zero(CanBeMinusZero() || other->CanBeMinusZero());
252 void Range::Sar(int32_t value) {
253 int32_t bits = value & 0x1F;
254 lower_ = lower_ >> bits;
255 upper_ = upper_ >> bits;
256 set_can_be_minus_zero(false);
260 void Range::Shl(int32_t value) {
261 int32_t bits = value & 0x1F;
262 int old_lower = lower_;
263 int old_upper = upper_;
264 lower_ = lower_ << bits;
265 upper_ = upper_ << bits;
266 if (old_lower != lower_ >> bits || old_upper != upper_ >> bits) {
270 set_can_be_minus_zero(false);
274 bool Range::AddAndCheckOverflow(const Representation& r, Range* other) {
275 bool may_overflow = false;
276 lower_ = AddWithoutOverflow(r, lower_, other->lower(), &may_overflow);
277 upper_ = AddWithoutOverflow(r, upper_, other->upper(), &may_overflow);
286 bool Range::SubAndCheckOverflow(const Representation& r, Range* other) {
287 bool may_overflow = false;
288 lower_ = SubWithoutOverflow(r, lower_, other->upper(), &may_overflow);
289 upper_ = SubWithoutOverflow(r, upper_, other->lower(), &may_overflow);
298 void Range::KeepOrder() {
299 if (lower_ > upper_) {
300 int32_t tmp = lower_;
308 void Range::Verify() const {
309 ASSERT(lower_ <= upper_);
314 bool Range::MulAndCheckOverflow(const Representation& r, Range* other) {
315 bool may_overflow = false;
316 int v1 = MulWithoutOverflow(r, lower_, other->lower(), &may_overflow);
317 int v2 = MulWithoutOverflow(r, lower_, other->upper(), &may_overflow);
318 int v3 = MulWithoutOverflow(r, upper_, other->lower(), &may_overflow);
319 int v4 = MulWithoutOverflow(r, upper_, other->upper(), &may_overflow);
320 lower_ = Min(Min(v1, v2), Min(v3, v4));
321 upper_ = Max(Max(v1, v2), Max(v3, v4));
329 const char* HType::ToString() {
330 // Note: The c1visualizer syntax for locals allows only a sequence of the
331 // following characters: A-Za-z0-9_-|:
333 case kNone: return "none";
334 case kTagged: return "tagged";
335 case kTaggedPrimitive: return "primitive";
336 case kTaggedNumber: return "number";
337 case kSmi: return "smi";
338 case kHeapNumber: return "heap-number";
339 case kFloat32x4: return "float32x4";
340 case kInt32x4: return "int32x4";
341 case kString: return "string";
342 case kBoolean: return "boolean";
343 case kNonPrimitive: return "non-primitive";
344 case kJSArray: return "array";
345 case kJSObject: return "object";
348 return "unreachable";
352 HType HType::TypeFromValue(Handle<Object> value) {
353 HType result = HType::Tagged();
354 if (value->IsSmi()) {
355 result = HType::Smi();
356 } else if (value->IsHeapNumber()) {
357 result = HType::HeapNumber();
358 } else if (value->IsFloat32x4()) {
359 result = HType::Float32x4();
360 } else if (value->IsInt32x4()) {
361 result = HType::Int32x4();
362 } else if (value->IsString()) {
363 result = HType::String();
364 } else if (value->IsBoolean()) {
365 result = HType::Boolean();
366 } else if (value->IsJSObject()) {
367 result = HType::JSObject();
368 } else if (value->IsJSArray()) {
369 result = HType::JSArray();
375 HType HType::TypeFromRepresentation(Representation representation) {
376 HType result = HType::Tagged();
377 if (representation.IsSmi()) {
378 result = HType::Smi();
379 } else if (representation.IsDouble()) {
380 result = HType::HeapNumber();
381 } else if (representation.IsFloat32x4()) {
382 result = HType::Float32x4();
383 } else if (representation.IsInt32x4()) {
384 result = HType::Int32x4();
390 bool HValue::IsDefinedAfter(HBasicBlock* other) const {
391 return block()->block_id() > other->block_id();
395 HUseListNode* HUseListNode::tail() {
396 // Skip and remove dead items in the use list.
397 while (tail_ != NULL && tail_->value()->CheckFlag(HValue::kIsDead)) {
398 tail_ = tail_->tail_;
404 bool HValue::CheckUsesForFlag(Flag f) const {
405 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
406 if (it.value()->IsSimulate()) continue;
407 if (!it.value()->CheckFlag(f)) return false;
413 bool HValue::CheckUsesForFlag(Flag f, HValue** value) const {
414 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
415 if (it.value()->IsSimulate()) continue;
416 if (!it.value()->CheckFlag(f)) {
425 bool HValue::HasAtLeastOneUseWithFlagAndNoneWithout(Flag f) const {
426 bool return_value = false;
427 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
428 if (it.value()->IsSimulate()) continue;
429 if (!it.value()->CheckFlag(f)) return false;
436 HUseIterator::HUseIterator(HUseListNode* head) : next_(head) {
441 void HUseIterator::Advance() {
443 if (current_ != NULL) {
444 next_ = current_->tail();
445 value_ = current_->value();
446 index_ = current_->index();
451 int HValue::UseCount() const {
453 for (HUseIterator it(uses()); !it.Done(); it.Advance()) ++count;
458 HUseListNode* HValue::RemoveUse(HValue* value, int index) {
459 HUseListNode* previous = NULL;
460 HUseListNode* current = use_list_;
461 while (current != NULL) {
462 if (current->value() == value && current->index() == index) {
463 if (previous == NULL) {
464 use_list_ = current->tail();
466 previous->set_tail(current->tail());
472 current = current->tail();
476 // Do not reuse use list nodes in debug mode, zap them.
477 if (current != NULL) {
480 HUseListNode(current->value(), current->index(), NULL);
489 bool HValue::Equals(HValue* other) {
490 if (other->opcode() != opcode()) return false;
491 if (!other->representation().Equals(representation())) return false;
492 if (!other->type_.Equals(type_)) return false;
493 if (other->flags() != flags()) return false;
494 if (OperandCount() != other->OperandCount()) return false;
495 for (int i = 0; i < OperandCount(); ++i) {
496 if (OperandAt(i)->id() != other->OperandAt(i)->id()) return false;
498 bool result = DataEquals(other);
499 ASSERT(!result || Hashcode() == other->Hashcode());
504 intptr_t HValue::Hashcode() {
505 intptr_t result = opcode();
506 int count = OperandCount();
507 for (int i = 0; i < count; ++i) {
508 result = result * 19 + OperandAt(i)->id() + (result >> 7);
514 const char* HValue::Mnemonic() const {
516 #define MAKE_CASE(type) case k##type: return #type;
517 HYDROGEN_CONCRETE_INSTRUCTION_LIST(MAKE_CASE)
519 case kPhi: return "Phi";
525 bool HValue::CanReplaceWithDummyUses() {
526 return FLAG_unreachable_code_elimination &&
527 !(block()->IsReachable() ||
529 IsControlInstruction() ||
536 bool HValue::IsInteger32Constant() {
537 return IsConstant() && HConstant::cast(this)->HasInteger32Value();
541 int32_t HValue::GetInteger32Constant() {
542 return HConstant::cast(this)->Integer32Value();
546 bool HValue::EqualsInteger32Constant(int32_t value) {
547 return IsInteger32Constant() && GetInteger32Constant() == value;
551 void HValue::SetOperandAt(int index, HValue* value) {
552 RegisterUse(index, value);
553 InternalSetOperandAt(index, value);
557 void HValue::DeleteAndReplaceWith(HValue* other) {
558 // We replace all uses first, so Delete can assert that there are none.
559 if (other != NULL) ReplaceAllUsesWith(other);
565 void HValue::ReplaceAllUsesWith(HValue* other) {
566 while (use_list_ != NULL) {
567 HUseListNode* list_node = use_list_;
568 HValue* value = list_node->value();
569 ASSERT(!value->block()->IsStartBlock());
570 value->InternalSetOperandAt(list_node->index(), other);
571 use_list_ = list_node->tail();
572 list_node->set_tail(other->use_list_);
573 other->use_list_ = list_node;
578 void HValue::Kill() {
579 // Instead of going through the entire use list of each operand, we only
580 // check the first item in each use list and rely on the tail() method to
581 // skip dead items, removing them lazily next time we traverse the list.
583 for (int i = 0; i < OperandCount(); ++i) {
584 HValue* operand = OperandAt(i);
585 if (operand == NULL) continue;
586 HUseListNode* first = operand->use_list_;
587 if (first != NULL && first->value()->CheckFlag(kIsDead)) {
588 operand->use_list_ = first->tail();
594 void HValue::SetBlock(HBasicBlock* block) {
595 ASSERT(block_ == NULL || block == NULL);
597 if (id_ == kNoNumber && block != NULL) {
598 id_ = block->graph()->GetNextValueID(this);
603 void HValue::PrintTypeTo(StringStream* stream) {
604 if (!representation().IsTagged() || type().Equals(HType::Tagged())) return;
605 stream->Add(" type:%s", type().ToString());
609 void HValue::PrintRangeTo(StringStream* stream) {
610 if (range() == NULL || range()->IsMostGeneric()) return;
611 // Note: The c1visualizer syntax for locals allows only a sequence of the
612 // following characters: A-Za-z0-9_-|:
613 stream->Add(" range:%d_%d%s",
616 range()->CanBeMinusZero() ? "_m0" : "");
620 void HValue::PrintChangesTo(StringStream* stream) {
621 GVNFlagSet changes_flags = ChangesFlags();
622 if (changes_flags.IsEmpty()) return;
623 stream->Add(" changes[");
624 if (changes_flags == AllSideEffectsFlagSet()) {
627 bool add_comma = false;
628 #define PRINT_DO(type) \
629 if (changes_flags.Contains(kChanges##type)) { \
630 if (add_comma) stream->Add(","); \
632 stream->Add(#type); \
634 GVN_TRACKED_FLAG_LIST(PRINT_DO);
635 GVN_UNTRACKED_FLAG_LIST(PRINT_DO);
642 void HValue::PrintNameTo(StringStream* stream) {
643 stream->Add("%s%d", representation_.Mnemonic(), id());
647 bool HValue::HasMonomorphicJSObjectType() {
648 return !GetMonomorphicJSObjectMap().is_null();
652 bool HValue::UpdateInferredType() {
653 HType type = CalculateInferredType();
654 bool result = (!type.Equals(type_));
660 void HValue::RegisterUse(int index, HValue* new_value) {
661 HValue* old_value = OperandAt(index);
662 if (old_value == new_value) return;
664 HUseListNode* removed = NULL;
665 if (old_value != NULL) {
666 removed = old_value->RemoveUse(this, index);
669 if (new_value != NULL) {
670 if (removed == NULL) {
671 new_value->use_list_ = new(new_value->block()->zone()) HUseListNode(
672 this, index, new_value->use_list_);
674 removed->set_tail(new_value->use_list_);
675 new_value->use_list_ = removed;
681 void HValue::AddNewRange(Range* r, Zone* zone) {
682 if (!HasRange()) ComputeInitialRange(zone);
683 if (!HasRange()) range_ = new(zone) Range();
685 r->StackUpon(range_);
690 void HValue::RemoveLastAddedRange() {
692 ASSERT(range_->next() != NULL);
693 range_ = range_->next();
697 void HValue::ComputeInitialRange(Zone* zone) {
699 range_ = InferRange(zone);
704 void HInstruction::PrintTo(StringStream* stream) {
705 PrintMnemonicTo(stream);
707 PrintRangeTo(stream);
708 PrintChangesTo(stream);
710 if (CheckFlag(HValue::kHasNoObservableSideEffects)) {
711 stream->Add(" [noOSE]");
713 if (CheckFlag(HValue::kIsDead)) {
714 stream->Add(" [dead]");
719 void HInstruction::PrintDataTo(StringStream *stream) {
720 for (int i = 0; i < OperandCount(); ++i) {
721 if (i > 0) stream->Add(" ");
722 OperandAt(i)->PrintNameTo(stream);
727 void HInstruction::PrintMnemonicTo(StringStream* stream) {
728 stream->Add("%s ", Mnemonic());
732 void HInstruction::Unlink() {
734 ASSERT(!IsControlInstruction()); // Must never move control instructions.
735 ASSERT(!IsBlockEntry()); // Doesn't make sense to delete these.
736 ASSERT(previous_ != NULL);
737 previous_->next_ = next_;
739 ASSERT(block()->last() == this);
740 block()->set_last(previous_);
742 next_->previous_ = previous_;
748 void HInstruction::InsertBefore(HInstruction* next) {
750 ASSERT(!next->IsBlockEntry());
751 ASSERT(!IsControlInstruction());
752 ASSERT(!next->block()->IsStartBlock());
753 ASSERT(next->previous_ != NULL);
754 HInstruction* prev = next->previous();
756 next->previous_ = this;
759 SetBlock(next->block());
760 if (position() == RelocInfo::kNoPosition &&
761 next->position() != RelocInfo::kNoPosition) {
762 set_position(next->position());
767 void HInstruction::InsertAfter(HInstruction* previous) {
769 ASSERT(!previous->IsControlInstruction());
770 ASSERT(!IsControlInstruction() || previous->next_ == NULL);
771 HBasicBlock* block = previous->block();
772 // Never insert anything except constants into the start block after finishing
774 if (block->IsStartBlock() && block->IsFinished() && !IsConstant()) {
775 ASSERT(block->end()->SecondSuccessor() == NULL);
776 InsertAfter(block->end()->FirstSuccessor()->first());
780 // If we're inserting after an instruction with side-effects that is
781 // followed by a simulate instruction, we need to insert after the
782 // simulate instruction instead.
783 HInstruction* next = previous->next_;
784 if (previous->HasObservableSideEffects() && next != NULL) {
785 ASSERT(next->IsSimulate());
787 next = previous->next_;
790 previous_ = previous;
793 previous->next_ = this;
794 if (next != NULL) next->previous_ = this;
795 if (block->last() == previous) {
796 block->set_last(this);
798 if (position() == RelocInfo::kNoPosition &&
799 previous->position() != RelocInfo::kNoPosition) {
800 set_position(previous->position());
806 void HInstruction::Verify() {
807 // Verify that input operands are defined before use.
808 HBasicBlock* cur_block = block();
809 for (int i = 0; i < OperandCount(); ++i) {
810 HValue* other_operand = OperandAt(i);
811 if (other_operand == NULL) continue;
812 HBasicBlock* other_block = other_operand->block();
813 if (cur_block == other_block) {
814 if (!other_operand->IsPhi()) {
815 HInstruction* cur = this->previous();
816 while (cur != NULL) {
817 if (cur == other_operand) break;
818 cur = cur->previous();
820 // Must reach other operand in the same block!
821 ASSERT(cur == other_operand);
824 // If the following assert fires, you may have forgotten an
826 ASSERT(other_block->Dominates(cur_block));
830 // Verify that instructions that may have side-effects are followed
831 // by a simulate instruction.
832 if (HasObservableSideEffects() && !IsOsrEntry()) {
833 ASSERT(next()->IsSimulate());
836 // Verify that instructions that can be eliminated by GVN have overridden
837 // HValue::DataEquals. The default implementation is UNREACHABLE. We
838 // don't actually care whether DataEquals returns true or false here.
839 if (CheckFlag(kUseGVN)) DataEquals(this);
841 // Verify that all uses are in the graph.
842 for (HUseIterator use = uses(); !use.Done(); use.Advance()) {
843 if (use.value()->IsInstruction()) {
844 ASSERT(HInstruction::cast(use.value())->IsLinked());
851 void HDummyUse::PrintDataTo(StringStream* stream) {
852 value()->PrintNameTo(stream);
856 void HEnvironmentMarker::PrintDataTo(StringStream* stream) {
857 stream->Add("%s var[%d]", kind() == BIND ? "bind" : "lookup", index());
861 void HUnaryCall::PrintDataTo(StringStream* stream) {
862 value()->PrintNameTo(stream);
864 stream->Add("#%d", argument_count());
868 void HCallJSFunction::PrintDataTo(StringStream* stream) {
869 function()->PrintNameTo(stream);
871 stream->Add("#%d", argument_count());
875 HCallJSFunction* HCallJSFunction::New(
880 bool pass_argument_count) {
881 bool has_stack_check = false;
882 if (function->IsConstant()) {
883 HConstant* fun_const = HConstant::cast(function);
884 Handle<JSFunction> jsfun =
885 Handle<JSFunction>::cast(fun_const->handle(zone->isolate()));
886 has_stack_check = !jsfun.is_null() &&
887 (jsfun->code()->kind() == Code::FUNCTION ||
888 jsfun->code()->kind() == Code::OPTIMIZED_FUNCTION);
891 return new(zone) HCallJSFunction(
892 function, argument_count, pass_argument_count,
899 void HBinaryCall::PrintDataTo(StringStream* stream) {
900 first()->PrintNameTo(stream);
902 second()->PrintNameTo(stream);
904 stream->Add("#%d", argument_count());
908 void HBoundsCheck::ApplyIndexChange() {
909 if (skip_check()) return;
911 DecompositionResult decomposition;
912 bool index_is_decomposable = index()->TryDecompose(&decomposition);
913 if (index_is_decomposable) {
914 ASSERT(decomposition.base() == base());
915 if (decomposition.offset() == offset() &&
916 decomposition.scale() == scale()) return;
921 ReplaceAllUsesWith(index());
923 HValue* current_index = decomposition.base();
924 int actual_offset = decomposition.offset() + offset();
925 int actual_scale = decomposition.scale() + scale();
927 Zone* zone = block()->graph()->zone();
928 HValue* context = block()->graph()->GetInvalidContext();
929 if (actual_offset != 0) {
930 HConstant* add_offset = HConstant::New(zone, context, actual_offset);
931 add_offset->InsertBefore(this);
932 HInstruction* add = HAdd::New(zone, context,
933 current_index, add_offset);
934 add->InsertBefore(this);
935 add->AssumeRepresentation(index()->representation());
936 add->ClearFlag(kCanOverflow);
940 if (actual_scale != 0) {
941 HConstant* sar_scale = HConstant::New(zone, context, actual_scale);
942 sar_scale->InsertBefore(this);
943 HInstruction* sar = HSar::New(zone, context,
944 current_index, sar_scale);
945 sar->InsertBefore(this);
946 sar->AssumeRepresentation(index()->representation());
950 SetOperandAt(0, current_index);
958 void HBoundsCheck::PrintDataTo(StringStream* stream) {
959 index()->PrintNameTo(stream);
961 length()->PrintNameTo(stream);
962 if (base() != NULL && (offset() != 0 || scale() != 0)) {
963 stream->Add(" base: ((");
964 if (base() != index()) {
965 index()->PrintNameTo(stream);
967 stream->Add("index");
969 stream->Add(" + %d) >> %d)", offset(), scale());
972 stream->Add(" [DISABLED]");
977 void HBoundsCheck::InferRepresentation(HInferRepresentationPhase* h_infer) {
978 ASSERT(CheckFlag(kFlexibleRepresentation));
979 HValue* actual_index = index()->ActualValue();
980 HValue* actual_length = length()->ActualValue();
981 Representation index_rep = actual_index->representation();
982 Representation length_rep = actual_length->representation();
983 if (index_rep.IsTagged() && actual_index->type().IsSmi()) {
984 index_rep = Representation::Smi();
986 if (length_rep.IsTagged() && actual_length->type().IsSmi()) {
987 length_rep = Representation::Smi();
989 Representation r = index_rep.generalize(length_rep);
990 if (r.is_more_general_than(Representation::Integer32())) {
991 r = Representation::Integer32();
993 UpdateRepresentation(r, h_infer, "boundscheck");
997 Range* HBoundsCheck::InferRange(Zone* zone) {
998 Representation r = representation();
999 if (r.IsSmiOrInteger32() && length()->HasRange()) {
1000 int upper = length()->range()->upper() - (allow_equality() ? 0 : 1);
1003 Range* result = new(zone) Range(lower, upper);
1004 if (index()->HasRange()) {
1005 result->Intersect(index()->range());
1008 // In case of Smi representation, clamp result to Smi::kMaxValue.
1009 if (r.IsSmi()) result->ClampToSmi();
1012 return HValue::InferRange(zone);
1016 void HBoundsCheckBaseIndexInformation::PrintDataTo(StringStream* stream) {
1017 stream->Add("base: ");
1018 base_index()->PrintNameTo(stream);
1019 stream->Add(", check: ");
1020 base_index()->PrintNameTo(stream);
1024 void HCallWithDescriptor::PrintDataTo(StringStream* stream) {
1025 for (int i = 0; i < OperandCount(); i++) {
1026 OperandAt(i)->PrintNameTo(stream);
1029 stream->Add("#%d", argument_count());
1033 void HCallNewArray::PrintDataTo(StringStream* stream) {
1034 stream->Add(ElementsKindToString(elements_kind()));
1036 HBinaryCall::PrintDataTo(stream);
1040 void HCallRuntime::PrintDataTo(StringStream* stream) {
1041 stream->Add("%o ", *name());
1042 if (save_doubles() == kSaveFPRegs) {
1043 stream->Add("[save doubles] ");
1045 stream->Add("#%d", argument_count());
1049 void HClassOfTestAndBranch::PrintDataTo(StringStream* stream) {
1050 stream->Add("class_of_test(");
1051 value()->PrintNameTo(stream);
1052 stream->Add(", \"%o\")", *class_name());
1056 void HWrapReceiver::PrintDataTo(StringStream* stream) {
1057 receiver()->PrintNameTo(stream);
1059 function()->PrintNameTo(stream);
1063 void HAccessArgumentsAt::PrintDataTo(StringStream* stream) {
1064 arguments()->PrintNameTo(stream);
1066 index()->PrintNameTo(stream);
1067 stream->Add("], length ");
1068 length()->PrintNameTo(stream);
1072 void HControlInstruction::PrintDataTo(StringStream* stream) {
1073 stream->Add(" goto (");
1074 bool first_block = true;
1075 for (HSuccessorIterator it(this); !it.Done(); it.Advance()) {
1076 stream->Add(first_block ? "B%d" : ", B%d", it.Current()->block_id());
1077 first_block = false;
1083 void HUnaryControlInstruction::PrintDataTo(StringStream* stream) {
1084 value()->PrintNameTo(stream);
1085 HControlInstruction::PrintDataTo(stream);
1089 void HReturn::PrintDataTo(StringStream* stream) {
1090 value()->PrintNameTo(stream);
1091 stream->Add(" (pop ");
1092 parameter_count()->PrintNameTo(stream);
1093 stream->Add(" values)");
1097 Representation HBranch::observed_input_representation(int index) {
1098 static const ToBooleanStub::Types tagged_types(
1099 ToBooleanStub::NULL_TYPE |
1100 ToBooleanStub::SPEC_OBJECT |
1101 ToBooleanStub::STRING |
1102 ToBooleanStub::SYMBOL);
1103 if (expected_input_types_.ContainsAnyOf(tagged_types)) {
1104 return Representation::Tagged();
1106 if (expected_input_types_.Contains(ToBooleanStub::UNDEFINED)) {
1107 if (expected_input_types_.Contains(ToBooleanStub::HEAP_NUMBER)) {
1108 return Representation::Double();
1110 return Representation::Tagged();
1112 if (expected_input_types_.Contains(ToBooleanStub::HEAP_NUMBER)) {
1113 return Representation::Double();
1115 if (expected_input_types_.Contains(ToBooleanStub::SMI)) {
1116 return Representation::Smi();
1118 return Representation::None();
1122 bool HBranch::KnownSuccessorBlock(HBasicBlock** block) {
1123 HValue* value = this->value();
1124 if (value->EmitAtUses()) {
1125 ASSERT(value->IsConstant());
1126 ASSERT(!value->representation().IsDouble());
1127 *block = HConstant::cast(value)->BooleanValue()
1129 : SecondSuccessor();
1137 void HCompareMap::PrintDataTo(StringStream* stream) {
1138 value()->PrintNameTo(stream);
1139 stream->Add(" (%p)", *map().handle());
1140 HControlInstruction::PrintDataTo(stream);
1141 if (known_successor_index() == 0) {
1142 stream->Add(" [true]");
1143 } else if (known_successor_index() == 1) {
1144 stream->Add(" [false]");
1149 const char* HUnaryMathOperation::OpName() const {
1151 case kMathFloor: return "floor";
1152 case kMathRound: return "round";
1153 case kMathAbs: return "abs";
1154 case kMathLog: return "log";
1155 case kMathExp: return "exp";
1156 case kMathSqrt: return "sqrt";
1157 case kMathPowHalf: return "pow-half";
1165 Range* HUnaryMathOperation::InferRange(Zone* zone) {
1166 Representation r = representation();
1167 if (r.IsSmiOrInteger32() && value()->HasRange()) {
1168 if (op() == kMathAbs) {
1169 int upper = value()->range()->upper();
1170 int lower = value()->range()->lower();
1171 bool spans_zero = value()->range()->CanBeZero();
1172 // Math.abs(kMinInt) overflows its representation, on which the
1173 // instruction deopts. Hence clamp it to kMaxInt.
1174 int abs_upper = upper == kMinInt ? kMaxInt : abs(upper);
1175 int abs_lower = lower == kMinInt ? kMaxInt : abs(lower);
1177 new(zone) Range(spans_zero ? 0 : Min(abs_lower, abs_upper),
1178 Max(abs_lower, abs_upper));
1179 // In case of Smi representation, clamp Math.abs(Smi::kMinValue) to
1181 if (r.IsSmi()) result->ClampToSmi();
1185 return HValue::InferRange(zone);
1189 void HUnaryMathOperation::PrintDataTo(StringStream* stream) {
1190 const char* name = OpName();
1191 stream->Add("%s ", name);
1192 value()->PrintNameTo(stream);
1196 void HUnaryOperation::PrintDataTo(StringStream* stream) {
1197 value()->PrintNameTo(stream);
1201 void HHasInstanceTypeAndBranch::PrintDataTo(StringStream* stream) {
1202 value()->PrintNameTo(stream);
1204 case FIRST_JS_RECEIVER_TYPE:
1205 if (to_ == LAST_TYPE) stream->Add(" spec_object");
1207 case JS_REGEXP_TYPE:
1208 if (to_ == JS_REGEXP_TYPE) stream->Add(" reg_exp");
1211 if (to_ == JS_ARRAY_TYPE) stream->Add(" array");
1213 case JS_FUNCTION_TYPE:
1214 if (to_ == JS_FUNCTION_TYPE) stream->Add(" function");
1222 void HTypeofIsAndBranch::PrintDataTo(StringStream* stream) {
1223 value()->PrintNameTo(stream);
1224 stream->Add(" == %o", *type_literal_);
1225 HControlInstruction::PrintDataTo(stream);
1229 bool HTypeofIsAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
1230 if (value()->representation().IsSpecialization()) {
1231 if (compares_number_type()) {
1232 *block = FirstSuccessor();
1234 *block = SecondSuccessor();
1237 } else if (value()->representation().IsFloat32x4()) {
1238 if (compares_float32x4_type()) {
1239 *block = FirstSuccessor();
1241 *block = SecondSuccessor();
1244 } else if (value()->representation().IsInt32x4()) {
1245 if (compares_int32x4_type()) {
1246 *block = FirstSuccessor();
1248 *block = SecondSuccessor();
1258 void HCheckMapValue::PrintDataTo(StringStream* stream) {
1259 value()->PrintNameTo(stream);
1261 map()->PrintNameTo(stream);
1265 void HForInPrepareMap::PrintDataTo(StringStream* stream) {
1266 enumerable()->PrintNameTo(stream);
1270 void HForInCacheArray::PrintDataTo(StringStream* stream) {
1271 enumerable()->PrintNameTo(stream);
1273 map()->PrintNameTo(stream);
1274 stream->Add("[%d]", idx_);
1278 void HLoadFieldByIndex::PrintDataTo(StringStream* stream) {
1279 object()->PrintNameTo(stream);
1281 index()->PrintNameTo(stream);
// NOTE(review): elided excerpt — some interior lines are missing; code kept
// byte-identical, comments only.

// Matches "l is the all-ones constant (~0)". On success *negated is set to r
// — presumably on a line elided from this excerpt; confirm in full source.
1285 static bool MatchLeftIsOnes(HValue* l, HValue* r, HValue** negated) {
1286 if (!l->EqualsInteger32Constant(~0)) return false;

// Matches a bitwise negation written as "x ^ ~0" (either operand order),
// storing the non-constant operand in *negated.
1292 static bool MatchNegationViaXor(HValue* instr, HValue** negated) {
1293 if (!instr->IsBitwise()) return false;
1294 HBitwise* b = HBitwise::cast(instr);
1295 return (b->op() == Token::BIT_XOR) &&
1296 (MatchLeftIsOnes(b->left(), b->right(), negated) ||
1297 MatchLeftIsOnes(b->right(), b->left(), negated));

// Matches a double negation ~~x, extracting the innermost value into *arg.
1301 static bool MatchDoubleNegation(HValue* instr, HValue** arg) {
1303 return MatchNegationViaXor(instr, &negated) &&
1304 MatchNegationViaXor(negated, arg);

// Strength-reduces smi/int32 bitwise operations: drops identity operands
// (x & -1, x | 0, x ^ 0) and collapses the ~~x ToInt32 idiom. Skipped for
// uint32-flagged operands, where the reinterpretation could change results.
1308 HValue* HBitwise::Canonicalize() {
1309 if (!representation().IsSmiOrInteger32()) return this;
1310 // If x is an int32, then x & -1 == x, x | 0 == x and x ^ 0 == x.
1311 int32_t nop_constant = (op() == Token::BIT_AND) ? -1 : 0;
1312 if (left()->EqualsInteger32Constant(nop_constant) &&
1313 !right()->CheckFlag(kUint32)) {
1316 if (right()->EqualsInteger32Constant(nop_constant) &&
1317 !left()->CheckFlag(kUint32)) {
1320 // Optimize double negation, a common pattern used for ToInt32(x).
1322 if (MatchDoubleNegation(this, &arg) && !arg->CheckFlag(kUint32)) {
// NOTE(review): elided excerpt — interior lines missing; comments only.

// External (pointer-sized) left operand propagates to the result; otherwise
// defer to the generic arithmetic representation inference.
1329 Representation HAdd::RepresentationFromInputs() {
1330 Representation left_rep = left()->representation();
1331 if (left_rep.IsExternal()) {
1332 return Representation::External();
1334 return HArithmeticBinaryOperation::RepresentationFromInputs();

// For external-pointer arithmetic the (non-base) input must be an int32
// offset; otherwise defer to the generic requirement.
1338 Representation HAdd::RequiredInputRepresentation(int index) {
1340 Representation left_rep = left()->representation();
1341 if (left_rep.IsExternal()) {
1342 return Representation::Integer32();
1345 return HArithmeticBinaryOperation::RequiredInputRepresentation(index);

// True when arg2 is the given identity constant and arg1 already has a
// specialized (non-tagged) representation, so the op can be elided.
1349 static bool IsIdentityOperation(HValue* arg1, HValue* arg2, int32_t identity) {
1350 return arg1->representation().IsSpecialization() &&
1351 arg2->EqualsInteger32Constant(identity);

// x + 0 == x, except for doubles where x could be -0 (since -0 + 0 == +0).
1355 HValue* HAdd::Canonicalize() {
1356 // Adding 0 is an identity operation except in case of -0: -0 + 0 = +0
1357 if (IsIdentityOperation(left(), right(), 0) &&
1358 !left()->representation().IsDouble()) { // Left could be -0.
1361 if (IsIdentityOperation(right(), left(), 0) &&
1362 !left()->representation().IsDouble()) { // Right could be -0.

// x - 0 == x.
1369 HValue* HSub::Canonicalize() {
1370 if (IsIdentityOperation(left(), right(), 0)) return left();

// x * 1 == x (either operand order).
1375 HValue* HMul::Canonicalize() {
1376 if (IsIdentityOperation(left(), right(), 1)) return left();
1377 if (IsIdentityOperation(right(), left(), 1)) return right();

// Detects multiplication by the constant -1 on either side.
1382 bool HMul::MulMinusOne() {
1383 if (left()->EqualsInteger32Constant(-1) ||
1384 right()->EqualsInteger32Constant(-1)) {

// Body elided in this excerpt.
1392 HValue* HMod::Canonicalize() {

// x / 1 == x.
1397 HValue* HDiv::Canonicalize() {
1398 if (IsIdentityOperation(left(), right(), 1)) return left();

// A representation change to the same representation is a no-op.
1403 HValue* HChange::Canonicalize() {
1404 return (from().Equals(to())) ? value() : this;
// NOTE(review): elided excerpt — interior lines missing; comments only.

// Dead wrap is removed entirely; a receiver already known to be a JSObject
// needs no wrapping (remainder of the body elided here).
1408 HValue* HWrapReceiver::Canonicalize() {
1409 if (HasNoUses()) return NULL;
1410 if (receiver()->type().IsJSObject()) {

// Prints the operand whose typeof is taken.
1417 void HTypeof::PrintDataTo(StringStream* stream) {
1418 value()->PrintNameTo(stream);

// Factory: constant-folds a numeric constant that fits int32 directly into
// an HConstant in the required representation; otherwise emits a real
// HForceRepresentation instruction.
1422 HInstruction* HForceRepresentation::New(Zone* zone, HValue* context,
1423 HValue* value, Representation required_representation) {
1424 if (FLAG_fold_constants && value->IsConstant()) {
1425 HConstant* c = HConstant::cast(value);
1426 if (c->HasNumberValue()) {
1427 double double_res = c->DoubleValue();
1428 if (IsInt32Double(double_res)) {
1429 return HConstant::New(zone, context,
1430 static_cast<int32_t>(double_res),
1431 required_representation);
1435 return new(zone) HForceRepresentation(value, required_representation);

// Prints "<rep mnemonic> <value>".
1439 void HForceRepresentation::PrintDataTo(StringStream* stream) {
1440 stream->Add("%s ", representation().Mnemonic());
1441 value()->PrintNameTo(stream);

// Prints source/target representations plus truncation / deopt flags.
1445 void HChange::PrintDataTo(StringStream* stream) {
1446 HUnaryOperation::PrintDataTo(stream);
1447 stream->Add(" %s to %s", from().Mnemonic(), to().Mnemonic());
1449 if (CanTruncateToInt32()) stream->Add(" truncating-int32");
1450 if (CheckFlag(kBailoutOnMinusZero)) stream->Add(" -0?");
1451 if (CheckFlag(kAllowUndefinedAsNaN)) stream->Add(" allow-undefined-as-nan");
// NOTE(review): elided excerpt — interior lines missing; comments only.

// Canonicalizes math ops:
//  - round/floor of a value that is already smi/int32 is the value itself
//    (possibly via a cheap representation change);
//  - floor(a / b) with int32-convertible operands and a single use becomes
//    the dedicated HMathFloorOfDiv instruction.
1455 HValue* HUnaryMathOperation::Canonicalize() {
1456 if (op() == kMathRound || op() == kMathFloor) {
1457 HValue* val = value();
1458 if (val->IsChange()) val = HChange::cast(val)->value();
1459 if (val->representation().IsSmiOrInteger32()) {
1460 if (val->representation().Equals(representation())) return val;
1461 return Prepend(new(block()->zone()) HChange(
1462 val, representation(), false, false));
1465 if (op() == kMathFloor && value()->IsDiv() && value()->UseCount() == 1) {
1466 HDiv* hdiv = HDiv::cast(value());
// Normalize the dividend to an int32 value.
1468 HValue* left = hdiv->left();
1469 if (left->representation().IsInteger32()) {
1470 // A value with an integer representation does not need to be transformed.
1471 } else if (left->IsChange() && HChange::cast(left)->from().IsInteger32()) {
1472 // A change from an integer32 can be replaced by the integer32 value.
1473 left = HChange::cast(left)->value();
1474 } else if (hdiv->observed_input_representation(1).IsSmiOrInteger32()) {
1475 left = Prepend(new(block()->zone()) HChange(
1476 left, Representation::Integer32(), false, false));
// Normalize the divisor the same way (constants get copied to int32 rep).
1481 HValue* right = hdiv->right();
1482 if (right->IsInteger32Constant()) {
1483 right = Prepend(HConstant::cast(right)->CopyToRepresentation(
1484 Representation::Integer32(), right->block()->zone()));
1485 } else if (right->representation().IsInteger32()) {
1486 // A value with an integer representation does not need to be transformed.
1487 } else if (right->IsChange() &&
1488 HChange::cast(right)->from().IsInteger32()) {
1489 // A change from an integer32 can be replaced by the integer32 value.
1490 right = HChange::cast(right)->value();
1491 } else if (hdiv->observed_input_representation(2).IsSmiOrInteger32()) {
1492 right = Prepend(new(block()->zone()) HChange(
1493 right, Representation::Integer32(), false, false));

1498 return Prepend(HMathFloorOfDiv::New(
1499 block()->zone(), context(), left, right));
// NOTE(review): elided excerpt — interior lines missing; comments only.

// Drops checks that are statically satisfied: IS_STRING when the value's
// type is already String, IS_INTERNALIZED_STRING for a constant that holds
// an internalized string.
1505 HValue* HCheckInstanceType::Canonicalize() {
1506 if (check_ == IS_STRING && value()->type().IsString()) {
1510 if (check_ == IS_INTERNALIZED_STRING && value()->IsConstant()) {
1511 if (HConstant::cast(value())->HasInternalizedStringValue()) {

// Returns the [first, last] instance-type interval for interval checks.
1519 void HCheckInstanceType::GetCheckInterval(InstanceType* first,
1520 InstanceType* last) {
1521 ASSERT(is_interval_check());
1523 case IS_SPEC_OBJECT:
1524 *first = FIRST_SPEC_OBJECT_TYPE;
1525 *last = LAST_SPEC_OBJECT_TYPE;
1528 *first = *last = JS_ARRAY_TYPE;

// Returns the bit mask and tag for mask-and-compare (non-interval) checks.
1536 void HCheckInstanceType::GetCheckMaskAndTag(uint8_t* mask, uint8_t* tag) {
1537 ASSERT(!is_interval_check());
1540 *mask = kIsNotStringMask;
1543 case IS_INTERNALIZED_STRING:
1544 *mask = kIsNotStringMask | kIsNotInternalizedMask;
1545 *tag = kInternalizedTag;

// GVN hook: a map check with no uses that is dominated by a store performing
// a map transition to one of the checked maps is redundant and is deleted.
1553 bool HCheckMaps::HandleSideEffectDominator(GVNFlag side_effect,
1554 HValue* dominator) {
1555 ASSERT(side_effect == kChangesMaps);
1556 // TODO(mstarzinger): For now we specialize on HStoreNamedField, but once
1557 // type information is rich enough we should generalize this to any HType
1558 // for which the map is known.
1559 if (HasNoUses() && dominator->IsStoreNamedField()) {
1560 HStoreNamedField* store = HStoreNamedField::cast(dominator);
1561 if (!store->has_transition() || store->object() != value()) return false;
1562 HConstant* transition = HConstant::cast(store->transition());
1563 if (map_set_.Contains(transition->GetUnique())) {
1564 DeleteAndReplaceWith(NULL);

// Prints the value plus the checked map pointers, e.g. "v1 [0x..,0x..]".
1572 void HCheckMaps::PrintDataTo(StringStream* stream) {
1573 value()->PrintNameTo(stream);
1574 stream->Add(" [%p", *map_set_.at(0).handle())
1575 for (int i = 1; i < map_set_.size(); ++i) {
1576 stream->Add(",%p", *map_set_.at(i).handle());
1578 stream->Add("]%s", CanOmitMapChecks() ? "(omitted)" : "");

// Prints the value and a short dump of the expected object.
1582 void HCheckValue::PrintDataTo(StringStream* stream) {
1583 value()->PrintNameTo(stream);
1585 object().handle()->ShortPrint(stream);

// The check is redundant when the value is a constant equal to the expected
// unique object (result expression elided in this excerpt).
1589 HValue* HCheckValue::Canonicalize() {
1590 return (value()->IsConstant() &&
1591 HConstant::cast(value())->GetUnique() == object_)

// Human-readable name of the check kind, used by PrintDataTo below.
1597 const char* HCheckInstanceType::GetCheckName() {
1599 case IS_SPEC_OBJECT: return "object";
1600 case IS_JS_ARRAY: return "array";
1601 case IS_STRING: return "string";
1602 case IS_INTERNALIZED_STRING: return "internalized_string";

// Prints "<check name> <value>".
1609 void HCheckInstanceType::PrintDataTo(StringStream* stream) {
1610 stream->Add("%s ", GetCheckName());
1611 HUnaryOperation::PrintDataTo(stream);
// NOTE(review): elided excerpt — interior lines missing; comments only.

// Prints the stub's major name followed by the call operand.
1615 void HCallStub::PrintDataTo(StringStream* stream) {
1617 CodeStub::MajorName(major_key_, false));
1618 HUnaryCall::PrintDataTo(stream);

// Classifies the OSR environment slot (parameter/local/special/expression)
// and prints "<kind> @ <index>".
1622 void HUnknownOSRValue::PrintDataTo(StringStream *stream) {
1623 const char* type = "expression";
1624 if (environment_->is_local_index(index_)) type = "local";
1625 if (environment_->is_special_index(index_)) type = "special";
1626 if (environment_->is_parameter_index(index_)) type = "parameter";
1627 stream->Add("%s @ %d", type, index_);

// Prints left, right, and context operands of instanceof.
1631 void HInstanceOf::PrintDataTo(StringStream* stream) {
1632 left()->PrintNameTo(stream);
1634 right()->PrintNameTo(stream);
1636 context()->PrintNameTo(stream);
// NOTE(review): elided excerpt — interior lines missing; comments only.

// Default range: smi-representation values get [Smi::kMinValue, kMaxValue]
// and cannot be -0; everything else gets an unconstrained Range whose -0
// possibility depends on truncating uses.
1640 Range* HValue::InferRange(Zone* zone) {
1642 if (representation().IsSmi() || type().IsSmi()) {
1643 result = new(zone) Range(Smi::kMinValue, Smi::kMaxValue);
1644 result->set_can_be_minus_zero(false);
1646 result = new(zone) Range();
1647 result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32));
1648 // TODO(jkummerow): The range cannot be minus zero when the upper type
1649 // bound is Integer32.

// Range of a representation change: copy the input's range, recompute the
// -0 possibility from target representation and truncating uses, and clamp
// to the smi range when converting to smi. Also upgrades the static type to
// Smi when an int32 input is known to fit.
1655 Range* HChange::InferRange(Zone* zone) {
1656 Range* input_range = value()->range();
1657 if (from().IsInteger32() && !value()->CheckFlag(HInstruction::kUint32) &&
1660 input_range != NULL &&
1661 input_range->IsInSmiRange()))) {
1662 set_type(HType::Smi());
1663 ClearGVNFlag(kChangesNewSpacePromotion);
1665 Range* result = (input_range != NULL)
1666 ? input_range->Copy(zone)
1667 : HValue::InferRange(zone);
1668 result->set_can_be_minus_zero(!to().IsSmiOrInteger32() ||
1669 !(CheckFlag(kAllUsesTruncatingToInt32) ||
1670 CheckFlag(kAllUsesTruncatingToSmi)));
1671 if (to().IsSmi()) result->ClampToSmi();

// An int32 constant has the exact singleton range [v, v].
1676 Range* HConstant::InferRange(Zone* zone) {
1677 if (has_int32_value_) {
1678 Range* result = new(zone) Range(int32_value_, int32_value_);
1679 result->set_can_be_minus_zero(false);
1682 return HValue::InferRange(zone);

// A phi has no position of its own; borrow its block's first instruction's.
1686 int HPhi::position() const {
1687 return block()->first()->position();

// Loop-header phis get the full representation range (their value evolves
// across iterations); other phis take the union of their operands' ranges.
1691 Range* HPhi::InferRange(Zone* zone) {
1692 Representation r = representation();
1693 if (r.IsSmiOrInteger32()) {
1694 if (block()->IsLoopHeader()) {
1695 Range* range = r.IsSmi()
1696 ? new(zone) Range(Smi::kMinValue, Smi::kMaxValue)
1697 : new(zone) Range(kMinInt, kMaxInt);
1700 Range* range = OperandAt(0)->range()->Copy(zone);
1701 for (int i = 1; i < OperandCount(); ++i) {
1702 range->Union(OperandAt(i)->range());
1707 return HValue::InferRange(zone);
// NOTE(review): elided excerpt — interior lines missing; comments only.

// Range of a + b: interval addition; the kCanOverflow flag is cleared when
// the interval arithmetic proves no overflow or when all uses truncate.
// -0 is only possible when both inputs can be -0 and no use truncates.
1712 Range* HAdd::InferRange(Zone* zone) {
1713 Representation r = representation();
1714 if (r.IsSmiOrInteger32()) {
1715 Range* a = left()->range();
1716 Range* b = right()->range();
1717 Range* res = a->Copy(zone);
1718 if (!res->AddAndCheckOverflow(r, b) ||
1719 (r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) ||
1720 (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) {
1721 ClearFlag(kCanOverflow);
1723 res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
1724 !CheckFlag(kAllUsesTruncatingToInt32) &&
1725 a->CanBeMinusZero() && b->CanBeMinusZero());
1728 return HValue::InferRange(zone);

// Range of a - b: interval subtraction, same overflow-flag policy as HAdd.
// -0 requires a possibly -0 left operand and a possibly zero right operand.
1733 Range* HSub::InferRange(Zone* zone) {
1734 Representation r = representation();
1735 if (r.IsSmiOrInteger32()) {
1736 Range* a = left()->range();
1737 Range* b = right()->range();
1738 Range* res = a->Copy(zone);
1739 if (!res->SubAndCheckOverflow(r, b) ||
1740 (r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) ||
1741 (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) {
1742 ClearFlag(kCanOverflow);
1744 res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
1745 !CheckFlag(kAllUsesTruncatingToInt32) &&
1746 a->CanBeMinusZero() && b->CanBeZero());
1749 return HValue::InferRange(zone);

// Range of a * b: interval multiplication. Multiplication by -1 is excluded
// from the truncation shortcut (see comment below); -0 arises from
// 0 * negative in either order.
1754 Range* HMul::InferRange(Zone* zone) {
1755 Representation r = representation();
1756 if (r.IsSmiOrInteger32()) {
1757 Range* a = left()->range();
1758 Range* b = right()->range();
1759 Range* res = a->Copy(zone);
1760 if (!res->MulAndCheckOverflow(r, b) ||
1761 (((r.IsInteger32() && CheckFlag(kAllUsesTruncatingToInt32)) ||
1762 (r.IsSmi() && CheckFlag(kAllUsesTruncatingToSmi))) &&
1764 // Truncated int multiplication is too precise and therefore not the
1765 // same as converting to Double and back.
1766 // Handle truncated integer multiplication by -1 special.
1767 ClearFlag(kCanOverflow);
1769 res->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToSmi) &&
1770 !CheckFlag(kAllUsesTruncatingToInt32) &&
1771 ((a->CanBeZero() && b->CanBeNegative()) ||
1772 (a->CanBeNegative() && b->CanBeZero())));
1775 return HValue::InferRange(zone);

// Range of a / b: clears overflow (only kMinInt / -1 overflows) and the
// div-by-zero flag when the divisor cannot be zero. -0 is possible when the
// dividend can be -0, or zero dividend with a negative divisor.
1780 Range* HDiv::InferRange(Zone* zone) {
1781 if (representation().IsInteger32()) {
1782 Range* a = left()->range();
1783 Range* b = right()->range();
1784 Range* result = new(zone) Range();
1785 result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) &&
1786 (a->CanBeMinusZero() ||
1787 (a->CanBeZero() && b->CanBeNegative())));
1788 if (!a->Includes(kMinInt) || !b->Includes(-1)) {
1789 ClearFlag(HValue::kCanOverflow);
1792 if (!b->CanBeZero()) {
1793 ClearFlag(HValue::kCanBeDivByZero);
1797 return HValue::InferRange(zone);

// Range of a % b: bounded in magnitude by |b| - 1, with the sign of a.
1802 Range* HMod::InferRange(Zone* zone) {
1803 if (representation().IsInteger32()) {
1804 Range* a = left()->range();
1805 Range* b = right()->range();
1807 // The magnitude of the modulus is bounded by the right operand. Note that
1808 // apart from the cases involving kMinInt, the calculation below is the same
1809 // as Max(Abs(b->lower()), Abs(b->upper())) - 1.
1810 int32_t positive_bound = -(Min(NegAbs(b->lower()), NegAbs(b->upper())) + 1);
1812 // The result of the modulo operation has the sign of its left operand.
1813 bool left_can_be_negative = a->CanBeMinusZero() || a->CanBeNegative();
1814 Range* result = new(zone) Range(left_can_be_negative ? -positive_bound : 0,
1815 a->CanBePositive() ? positive_bound : 0);
1817 result->set_can_be_minus_zero(!CheckFlag(kAllUsesTruncatingToInt32) &&
1818 left_can_be_negative);
1820 if (!a->Includes(kMinInt) || !b->Includes(-1)) {
1821 ClearFlag(HValue::kCanOverflow);
1824 if (!b->CanBeZero()) {
1825 ClearFlag(HValue::kCanBeDivByZero);
1829 return HValue::InferRange(zone);
// NOTE(review): elided excerpt — interior lines missing; comments only.

// Recognizes a two-operand phi in a loop as an induction variable: one
// operand must be "phi +/- constant" (the increment), the other is the base.
1834 InductionVariableData* InductionVariableData::ExaminePhi(HPhi* phi) {
1835 if (phi->block()->loop_information() == NULL) return NULL;
1836 if (phi->OperandCount() != 2) return NULL;
1837 int32_t candidate_increment;
1839 candidate_increment = ComputeIncrement(phi, phi->OperandAt(0));
1840 if (candidate_increment != 0) {
1841 return new(phi->block()->graph()->zone())
1842 InductionVariableData(phi, phi->OperandAt(1), candidate_increment);
1845 candidate_increment = ComputeIncrement(phi, phi->OperandAt(1));
1846 if (candidate_increment != 0) {
1847 return new(phi->block()->graph()->zone())
1848 InductionVariableData(phi, phi->OperandAt(0), candidate_increment);

1856 * This function tries to match the following patterns (and all the relevant
1857 * variants related to |, & and + being commutative):
1858 * base | constant_or_mask
1859 * base & constant_and_mask
1860 * (base + constant_offset) & constant_and_mask
1861 * (base - constant_offset) & constant_and_mask
1863 void InductionVariableData::DecomposeBitwise(
1865 BitwiseDecompositionResult* result) {
1866 HValue* base = IgnoreOsrValue(value);
1867 result->base = value;
1869 if (!base->representation().IsInteger32()) return;
1871 if (base->IsBitwise()) {
1872 bool allow_offset = false;
1875 HBitwise* bitwise = HBitwise::cast(base);
// Pull the constant mask out of either operand of the bitwise op.
1876 if (bitwise->right()->IsInteger32Constant()) {
1877 mask = bitwise->right()->GetInteger32Constant();
1878 base = bitwise->left();
1879 } else if (bitwise->left()->IsInteger32Constant()) {
1880 mask = bitwise->left()->GetInteger32Constant();
1881 base = bitwise->right();
// AND masks may additionally tolerate a +/- constant offset on the base.
1885 if (bitwise->op() == Token::BIT_AND) {
1886 result->and_mask = mask;
1887 allow_offset = true;
1888 } else if (bitwise->op() == Token::BIT_OR) {
1889 result->or_mask = mask;
1894 result->context = bitwise->context();
// Strip a constant add/sub offset from the base when permitted.
1897 if (base->IsAdd()) {
1898 HAdd* add = HAdd::cast(base);
1899 if (add->right()->IsInteger32Constant()) {
1901 } else if (add->left()->IsInteger32Constant()) {
1902 base = add->right();
1904 } else if (base->IsSub()) {
1905 HSub* sub = HSub::cast(base);
1906 if (sub->right()->IsInteger32Constant()) {
1912 result->base = base;

// Records a bounds check against this induction variable, grouped by the
// length value it checks against. The check's limit must be valid at the
// check's block and the check must live inside the induction loop.
1917 void InductionVariableData::AddCheck(HBoundsCheck* check,
1918 int32_t upper_limit) {
1919 ASSERT(limit_validity() != NULL);
1920 if (limit_validity() != check->block() &&
1921 !limit_validity()->Dominates(check->block())) return;
1922 if (!phi()->block()->current_loop()->IsNestedInThisLoop(
1923 check->block()->current_loop())) return;
1925 ChecksRelatedToLength* length_checks = checks();
// Find (or lazily create) the per-length bucket for this check.
1926 while (length_checks != NULL) {
1927 if (length_checks->length() == check->length()) break;
1928 length_checks = length_checks->next();
1930 if (length_checks == NULL) {
1931 length_checks = new(check->block()->zone())
1932 ChecksRelatedToLength(check->length(), checks());
1933 checks_ = length_checks;
1936 length_checks->AddCheck(check, upper_limit);
// NOTE(review): elided excerpt — interior lines missing; comments only.

// Finalizes the current block's run of checks by stamping the accumulated
// upper limit onto every check recorded in that block.
1940 void InductionVariableData::ChecksRelatedToLength::CloseCurrentBlock() {
1941 if (checks() != NULL) {
1942 InductionVariableCheck* c = checks();
1943 HBasicBlock* current_block = c->check()->block();
1944 while (c != NULL && c->check()->block() == current_block) {
1945 c->set_upper_limit(current_upper_limit_);

// Rewrites the first bounds check in the current block to use a masked
// index ("index_base <token> mask"), reusing/creating the HConstant and
// HBitwise helper instructions, so later checks in the block can be skipped.
1952 void InductionVariableData::ChecksRelatedToLength::UseNewIndexInCurrentBlock(
1957 ASSERT(first_check_in_block() != NULL);
1958 HValue* previous_index = first_check_in_block()->index();
1959 ASSERT(context != NULL);
1961 Zone* zone = index_base->block()->graph()->zone();
1962 set_added_constant(HConstant::New(zone, context, mask));
1963 if (added_index() != NULL) {
1964 added_constant()->InsertBefore(added_index());
1966 added_constant()->InsertBefore(first_check_in_block());
// Create the masking bitwise op on first use; afterwards just retarget it.
1969 if (added_index() == NULL) {
1970 first_check_in_block()->ReplaceAllUsesWith(first_check_in_block()->index());
1971 HInstruction* new_index = HBitwise::New(zone, context, token, index_base,
1973 ASSERT(new_index->IsBitwise());
1974 new_index->ClearAllSideEffects();
1975 new_index->AssumeRepresentation(Representation::Integer32());
1976 set_added_index(HBitwise::cast(new_index));
1977 added_index()->InsertBefore(first_check_in_block());
1979 ASSERT(added_index()->op() == token);
1981 added_index()->SetOperandAt(1, index_base);
1982 added_index()->SetOperandAt(2, added_constant());
1983 first_check_in_block()->SetOperandAt(0, added_index());
// Garbage-collect the old index if nothing else uses it.
1984 if (previous_index->UseCount() == 0) {
1985 previous_index->DeleteAndReplaceWith(NULL);

// Adds one bounds check to this length bucket. Within a basic block the
// first check is kept and subsequent checks are subsumed (skip_check) when
// their and/or masks allow reusing the rewritten index.
1989 void InductionVariableData::ChecksRelatedToLength::AddCheck(
1990 HBoundsCheck* check,
1991 int32_t upper_limit) {
1992 BitwiseDecompositionResult decomposition;
1993 InductionVariableData::DecomposeBitwise(check->index(), &decomposition);
// Entering a new basic block: close out the previous block's bookkeeping.
1995 if (first_check_in_block() == NULL ||
1996 first_check_in_block()->block() != check->block()) {
1997 CloseCurrentBlock();
1999 first_check_in_block_ = check;
2000 set_added_index(NULL);
2001 set_added_constant(NULL);
2002 current_and_mask_in_block_ = decomposition.and_mask;
2003 current_or_mask_in_block_ = decomposition.or_mask;
2004 current_upper_limit_ = upper_limit;
2006 InductionVariableCheck* new_check = new(check->block()->graph()->zone())
2007 InductionVariableCheck(check, checks_, upper_limit);
2008 checks_ = new_check;
// Same block as the previous check: widen the tracked limit/masks.
2012 if (upper_limit > current_upper_limit()) {
2013 current_upper_limit_ = upper_limit;
2016 if (decomposition.and_mask != 0 &&
2017 current_or_mask_in_block() == 0) {
2018 if (current_and_mask_in_block() == 0 ||
2019 decomposition.and_mask > current_and_mask_in_block()) {
2020 UseNewIndexInCurrentBlock(Token::BIT_AND,
2021 decomposition.and_mask,
2023 decomposition.context);
2024 current_and_mask_in_block_ = decomposition.and_mask;
2026 check->set_skip_check();
2028 if (current_and_mask_in_block() == 0) {
2029 if (decomposition.or_mask > current_or_mask_in_block()) {
2030 UseNewIndexInCurrentBlock(Token::BIT_OR,
2031 decomposition.or_mask,
2033 decomposition.context);
2034 current_or_mask_in_block_ = decomposition.or_mask;
2036 check->set_skip_check();
// Checks that could not be subsumed are queued for later processing.
2039 if (!check->skip_check()) {
2040 InductionVariableCheck* new_check = new(check->block()->graph()->zone())
2041 InductionVariableCheck(check, checks_, upper_limit);
2042 checks_ = new_check;
// NOTE(review): elided excerpt — interior lines missing; comments only.

2048 * This method detects if phi is an induction variable, with phi_operand as
2049 * its "incremented" value (the other operand would be the "base" value).
2051 * It checks if phi_operand has the form "phi + constant".
2052 * If yes, the constant is the increment that the induction variable gets at
2053 * every loop iteration.
2054 * Otherwise it returns 0.
2056 int32_t InductionVariableData::ComputeIncrement(HPhi* phi,
2057 HValue* phi_operand) {
2058 if (!phi_operand->representation().IsInteger32()) return 0;
// "phi + c" or "c + phi" yields +c; "phi - c" yields -c.
2060 if (phi_operand->IsAdd()) {
2061 HAdd* operation = HAdd::cast(phi_operand);
2062 if (operation->left() == phi &&
2063 operation->right()->IsInteger32Constant()) {
2064 return operation->right()->GetInteger32Constant();
2065 } else if (operation->right() == phi &&
2066 operation->left()->IsInteger32Constant()) {
2067 return operation->left()->GetInteger32Constant();
2069 } else if (phi_operand->IsSub()) {
2070 HSub* operation = HSub::cast(phi_operand);
2071 if (operation->left() == phi &&
2072 operation->right()->IsInteger32Constant()) {
2073 return -operation->right()->GetInteger32Constant();

2082 * Swaps the information in "update" with the one contained in "this".
2083 * The swapping is important because this method is used while doing a
2084 * dominator tree traversal, and "update" will retain the old data that
2085 * will be restored while backtracking.
2087 void InductionVariableData::UpdateAdditionalLimit(
2088 InductionVariableLimitUpdate* update) {
2089 ASSERT(update->updated_variable == this);
2090 if (update->limit_is_upper) {
2091 swap(&additional_upper_limit_, &update->limit);
2092 swap(&additional_upper_limit_is_included_, &update->limit_is_included);
2094 swap(&additional_lower_limit_, &update->limit);
2095 swap(&additional_lower_limit_is_included_, &update->limit_is_included);

// Combines the constant loop limit, any additional upper limit, and an AND
// mask into a single conservative upper bound; kNoLimit when unbounded.
2100 int32_t InductionVariableData::ComputeUpperLimit(int32_t and_mask,
2102 // Should be Smi::kMaxValue but it must fit 32 bits; lower is safe anyway.
2103 const int32_t MAX_LIMIT = 1 << 30;
2105 int32_t result = MAX_LIMIT;
2107 if (limit() != NULL &&
2108 limit()->IsInteger32Constant()) {
2109 int32_t limit_value = limit()->GetInteger32Constant();
2110 if (!limit_included()) {
2113 if (limit_value < result) result = limit_value;
2116 if (additional_upper_limit() != NULL &&
2117 additional_upper_limit()->IsInteger32Constant()) {
2118 int32_t limit_value = additional_upper_limit()->GetInteger32Constant();
2119 if (!additional_upper_limit_is_included()) {
2122 if (limit_value < result) result = limit_value;
2125 if (and_mask > 0 && and_mask < MAX_LIMIT) {
2126 if (and_mask < result) result = and_mask;
2130 // Add the effect of the or_mask.
2133 return result >= MAX_LIMIT ? kNoLimit : result;
// NOTE(review): elided excerpt — interior lines missing; comments only.

// Looks through a two-operand phi that merges an OSR-entry value with the
// regular loop value, returning the non-OSR operand.
2137 HValue* InductionVariableData::IgnoreOsrValue(HValue* v) {
2138 if (!v->IsPhi()) return v;
2139 HPhi* phi = HPhi::cast(v);
2140 if (phi->OperandCount() != 2) return v;
2141 if (phi->OperandAt(0)->block()->is_osr_entry()) {
2142 return phi->OperandAt(1);
2143 } else if (phi->OperandAt(1)->block()->is_osr_entry()) {
2144 return phi->OperandAt(0);

// Fetches the induction-variable data attached to a phi (after stripping
// any OSR merge); guard conditions are elided in this excerpt.
2151 InductionVariableData* InductionVariableData::GetInductionVariableData(
2153 v = IgnoreOsrValue(v);
2155 return HPhi::cast(v)->induction_variable_data();

2162 * Check if a conditional branch to "current_branch" with token "token" is
2163 * the branch that keeps the induction loop running (and, conversely, will
2164 * terminate it if the "other_branch" is taken).
2166 * Three conditions must be met:
2167 * - "current_branch" must be in the induction loop.
2168 * - "other_branch" must be out of the induction loop.
2169 * - "token" and the induction increment must be "compatible": the token should
2170 * be a condition that keeps the execution inside the loop until the limit is
2173 bool InductionVariableData::CheckIfBranchIsLoopGuard(
2175 HBasicBlock* current_branch,
2176 HBasicBlock* other_branch) {
2177 if (!phi()->block()->current_loop()->IsNestedInThisLoop(
2178 current_branch->current_loop())) {
2182 if (phi()->block()->current_loop()->IsNestedInThisLoop(
2183 other_branch->current_loop())) {
// Increment direction must agree with the comparison direction.
2187 if (increment() > 0 && (token == Token::LT || token == Token::LTE)) {
2190 if (increment() < 0 && (token == Token::GT || token == Token::GTE)) {
2193 if (Token::IsInequalityOp(token) && (increment() == 1 || increment() == -1)) {

// Extracts, from a block's single predecessor ending in a numeric
// compare-and-branch, the induction variable / comparison token / limit
// that hold when control enters "block".
2201 void InductionVariableData::ComputeLimitFromPredecessorBlock(
2203 LimitFromPredecessorBlock* result) {
2204 if (block->predecessors()->length() != 1) return;
2205 HBasicBlock* predecessor = block->predecessors()->at(0);
2206 HInstruction* end = predecessor->last();
2208 if (!end->IsCompareNumericAndBranch()) return;
2209 HCompareNumericAndBranch* branch = HCompareNumericAndBranch::cast(end);
2211 Token::Value token = branch->token();
2212 if (!Token::IsArithmeticCompareOp(token)) return;
// If "block" is the false successor, negate the token so it describes the
// condition under which "block" is entered.
2214 HBasicBlock* other_target;
2215 if (block == branch->SuccessorAt(0)) {
2216 other_target = branch->SuccessorAt(1);
2218 other_target = branch->SuccessorAt(0);
2219 token = Token::NegateCompareOp(token);
2220 ASSERT(block == branch->SuccessorAt(1));
// Allow the induction variable on either side of the compare; reverse the
// token when it is on the right.
2223 InductionVariableData* data;
2225 data = GetInductionVariableData(branch->left());
2226 HValue* limit = branch->right();
2228 data = GetInductionVariableData(branch->right());
2229 token = Token::ReverseCompareOp(token);
2230 limit = branch->left();
2234 result->variable = data;
2235 result->token = token;
2236 result->limit = limit;
2237 result->other_target = other_target;

2243 * Compute the limit that is imposed on an induction variable when entering
2245 * If the limit is the "proper" induction limit (the one that makes the loop
2246 * terminate when the induction variable reaches it) it is stored directly in
2247 * the induction variable data.
2248 * Otherwise the limit is written in "additional_limit" and the method
2251 bool InductionVariableData::ComputeInductionVariableLimit(
2253 InductionVariableLimitUpdate* additional_limit) {
2254 LimitFromPredecessorBlock limit;
2255 ComputeLimitFromPredecessorBlock(block, &limit);
2256 if (!limit.LimitIsValid()) return false;
2258 if (limit.variable->CheckIfBranchIsLoopGuard(limit.token,
2260 limit.other_target)) {
2261 limit.variable->limit_ = limit.limit;
2262 limit.variable->limit_included_ = limit.LimitIsIncluded();
2263 limit.variable->limit_validity_ = block;
2264 limit.variable->induction_exit_block_ = block->predecessors()->at(0);
2265 limit.variable->induction_exit_target_ = limit.other_target;
2268 additional_limit->updated_variable = limit.variable;
2269 additional_limit->limit = limit.limit;
2270 additional_limit->limit_is_upper = limit.LimitIsUpper();
2271 additional_limit->limit_is_included = limit.LimitIsIncluded();
// NOTE(review): elided excerpt — interior lines missing; comments only.

// Range of min/max: combine both operand ranges with the matching operation.
2277 Range* HMathMinMax::InferRange(Zone* zone) {
2278 if (representation().IsSmiOrInteger32()) {
2279 Range* a = left()->range();
2280 Range* b = right()->range();
2281 Range* res = a->Copy(zone);
2282 if (operation_ == kMathMax) {
2283 res->CombinedMax(b);
2285 ASSERT(operation_ == kMathMin);
2286 res->CombinedMin(b);
2290 return HValue::InferRange(zone);

// Prints all operands followed by per-representation use counts
// (smi/int32/double/tagged), range and type.
2295 void HPhi::PrintTo(StringStream* stream) {
2297 for (int i = 0; i < OperandCount(); ++i) {
2298 HValue* value = OperandAt(i);
2300 value->PrintNameTo(stream);
2303 stream->Add(" uses:%d_%ds_%di_%dd_%dt",
2305 smi_non_phi_uses() + smi_indirect_uses(),
2306 int32_non_phi_uses() + int32_indirect_uses(),
2307 double_non_phi_uses() + double_indirect_uses(),
2308 tagged_non_phi_uses() + tagged_indirect_uses());
2309 PrintRangeTo(stream);
2310 PrintTypeTo(stream);
// NOTE(review): elided excerpt — interior lines missing; comments only.

// Appends an operand to the phi, propagating the kIsArguments flag so phis
// that may (transitively) hold the arguments object are marked.
2315 void HPhi::AddInput(HValue* value) {
2316 inputs_.Add(NULL, value->block()->zone());
2317 SetOperandAt(OperandCount() - 1, value);
2318 // Mark phis that may have 'arguments' directly or indirectly as an operand.
2319 if (!CheckFlag(kIsArguments) && value->CheckFlag(kIsArguments)) {
2320 SetFlag(kIsArguments);

// True if the phi has at least one non-phi use.
2325 bool HPhi::HasRealUses() {
2326 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
2327 if (!it.value()->IsPhi()) return true;

// If every operand is either this phi itself or a single common value,
// returns that value (the phi is redundant); otherwise NULL.
2333 HValue* HPhi::GetRedundantReplacement() {
2334 HValue* candidate = NULL;
2335 int count = OperandCount();
2337 while (position < count && candidate == NULL) {
2338 HValue* current = OperandAt(position++);
2339 if (current != this) candidate = current;
2341 while (position < count) {
2342 HValue* current = OperandAt(position++);
2343 if (current != this && current != candidate) return NULL;
2345 ASSERT(candidate != this);

// Removes the phi from its block (which resets its block pointer).
2350 void HPhi::DeleteFromGraph() {
2351 ASSERT(block() != NULL);
2352 block()->RemovePhi(this);
2353 ASSERT(block() == NULL);

// Tallies direct (non-phi) uses per representation and conservatively
// seeds the truncation flags from the uses' flags.
2357 void HPhi::InitRealUses(int phi_id) {
2358 // Initialize real uses.
2360 // Compute a conservative approximation of truncating uses before inferring
2361 // representations. The proper, exact computation will be done later, when
2362 // inserting representation changes.
2363 SetFlag(kTruncatingToSmi)
2364 SetFlag(kTruncatingToInt32);
2365 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
2366 HValue* value = it.value();
2367 if (!value->IsPhi()) {
2368 Representation rep = value->observed_input_representation(it.index());
2369 non_phi_uses_[rep.kind()] += 1;
2370 if (FLAG_trace_representation) {
2371 PrintF("#%d Phi is used by real #%d %s as %s\n",
2372 id(), value->id(), value->Mnemonic(), rep.Mnemonic());
2374 if (!value->IsSimulate()) {
2375 if (!value->CheckFlag(kTruncatingToSmi)) {
2376 ClearFlag(kTruncatingToSmi);
2378 if (!value->CheckFlag(kTruncatingToInt32)) {
2379 ClearFlag(kTruncatingToInt32);

// Folds another phi's direct-use counts into this phi's indirect-use counts.
2387 void HPhi::AddNonPhiUsesFrom(HPhi* other) {
2388 if (FLAG_trace_representation) {
2389 PrintF("adding to #%d Phi uses of #%d Phi: s%d i%d d%d t%d\n",
2391 other->non_phi_uses_[Representation::kSmi],
2392 other->non_phi_uses_[Representation::kInteger32],
2393 other->non_phi_uses_[Representation::kDouble],
2394 other->non_phi_uses_[Representation::kTagged]);
2397 for (int i = 0; i < Representation::kNumRepresentations; i++) {
2398 indirect_uses_[i] += other->non_phi_uses_[i];

// Accumulates this phi's indirect-use counts into the caller's array.
2403 void HPhi::AddIndirectUsesTo(int* dest) {
2404 for (int i = 0; i < Representation::kNumRepresentations; i++) {
2405 dest[i] += indirect_uses_[i];
// NOTE(review): elided excerpt — interior lines missing; comments only.

// Merges a list of simulates into this one: assigned values are copied
// unless this simulate already covers the index, pushes are forwarded, and
// pop counts accumulate; the merged simulates are deleted.
2410 void HSimulate::MergeWith(ZoneList<HSimulate*>* list) {
2411 while (!list->is_empty()) {
2412 HSimulate* from = list->RemoveLast();
2413 ZoneList<HValue*>* from_values = &from->values_;
2414 for (int i = 0; i < from_values->length(); ++i) {
2415 if (from->HasAssignedIndexAt(i)) {
2416 int index = from->GetAssignedIndexAt(i);
2417 if (HasValueForIndex(index)) continue;
2418 AddAssignedValue(index, from_values->at(i));
2420 if (pop_count_ > 0) {
2423 AddPushedValue(from_values->at(i));
2427 pop_count_ += from->pop_count_;
2428 from->DeleteAndReplaceWith(NULL);

// Prints the AST id, pop count, and the recorded var assignments / pushes.
2433 void HSimulate::PrintDataTo(StringStream* stream) {
2434 stream->Add("id=%d", ast_id().ToInt());
2435 if (pop_count_ > 0) stream->Add(" pop %d", pop_count_);
2436 if (values_.length() > 0) {
2437 if (pop_count_ > 0) stream->Add(" /");
2438 for (int i = values_.length() - 1; i >= 0; --i) {
2439 if (HasAssignedIndexAt(i)) {
2440 stream->Add(" var[%d] = ", GetAssignedIndexAt(i));
2442 stream->Add(" push ");
2444 values_[i]->PrintNameTo(stream);
2445 if (i > 0) stream->Add(",");

// Replays this simulate's effects onto an environment: set the AST id,
// drop popped values, then bind/push the recorded values.
2451 void HSimulate::ReplayEnvironment(HEnvironment* env) {
2452 ASSERT(env != NULL);
2453 env->set_ast_id(ast_id());
2454 env->Drop(pop_count());
2455 for (int i = values()->length() - 1; i >= 0; --i) {
2456 HValue* value = values()->at(i);
2457 if (HasAssignedIndexAt(i)) {
2458 env->Bind(GetAssignedIndexAt(i), value);
// File-local helper: replaces every captured object in |values| whose
// capture_id matches |other|'s with |other| itself, and recurses into the
// value lists of non-matching captured objects.
2466 static void ReplayEnvironmentNested(const ZoneList<HValue*>* values,
2467 HCapturedObject* other) {
2468 for (int i = 0; i < values->length(); ++i) {
2469 HValue* value = values->at(i);
2470 if (value->IsCapturedObject()) {
2471 if (HCapturedObject::cast(value)->capture_id() == other->capture_id()) {
2472 values->at(i) = other;
2474 ReplayEnvironmentNested(HCapturedObject::cast(value)->values(), other);
2481 // Replay captured objects by replacing all captured objects with the
2482 // same capture id in the current and all outer environments.
// NOTE(review): the loop's advance to the outer environment (presumably
// env = env->outer()) is on a line missing from this dump — confirm.
2483 void HCapturedObject::ReplayEnvironment(HEnvironment* env) {
2484 ASSERT(env != NULL);
2485 while (env != NULL) {
2486 ReplayEnvironmentNested(env->values(), this);
// Debug printing: the capture id followed by the base class's output.
2492 void HCapturedObject::PrintDataTo(StringStream* stream) {
2493 stream->Add("#%d ", capture_id());
2494 HDematerializedObject::PrintDataTo(stream);
// Records a basic block that returns from this inlined function; the block
// must already be flagged as an inline return target.
2498 void HEnterInlined::RegisterReturnTarget(HBasicBlock* return_target,
2500 ASSERT(return_target->IsInlineReturnTarget());
2501 return_targets_.Add(return_target, zone);
// Debug printing: the inlined function's debug name and function id.
2505 void HEnterInlined::PrintDataTo(StringStream* stream) {
2506 SmartArrayPointer<char> name = function()->debug_name()->ToCString();
2507 stream->Add("%s, id=%d", name.get(), function()->id().ToInt());
// True iff |value| survives a double -> int32 -> double round trip
// bit-exactly. The bit comparison (rather than ==) makes -0.0 fail the
// test, since -0.0 is not representable as an int32.
2511 static bool IsInteger32(double value) {
2512 double roundtrip_value = static_cast<double>(static_cast<int32_t>(value));
2513 return BitCast<int64_t>(roundtrip_value) == BitCast<int64_t>(value);
// Constructs a constant from an arbitrary heap handle: classifies it as
// smi/int32/double where it is a number, records new-space residency,
// internalized-string and cell status, and caches its boolean value.
2517 HConstant::HConstant(Handle<Object> handle, Representation r)
2518 : HTemplateInstruction<0>(HType::TypeFromValue(handle)),
2519 object_(Unique<Object>::CreateUninitialized(handle)),
2520 has_smi_value_(false),
2521 has_int32_value_(false),
2522 has_double_value_(false),
2523 has_external_reference_value_(false),
2524 is_internalized_string_(false),
2525 is_not_in_new_space_(true),
2527 boolean_value_(handle->BooleanValue()) {
2528 if (handle->IsHeapObject()) {
2529 Heap* heap = Handle<HeapObject>::cast(handle)->GetHeap();
2530 is_not_in_new_space_ = !heap->InNewSpace(*handle);
2532 if (handle->IsNumber()) {
2533 double n = handle->Number();
// int32/smi classification piggybacks on the exact-round-trip check above.
2534 has_int32_value_ = IsInteger32(n);
2535 int32_value_ = DoubleToInt32(n);
2536 has_smi_value_ = has_int32_value_ && Smi::IsValid(int32_value_);
2538 has_double_value_ = true;
2539 // TODO(titzer): if this heap number is new space, tenure a new one.
2541 is_internalized_string_ = handle->IsInternalizedString();
2544 is_cell_ = !handle.is_null() &&
2545 (handle->IsCell() || handle->IsPropertyCell());
// Constructs a constant from a pre-uniquified object reference with its
// classification flags supplied by the caller (used by CopyToRepresentation).
// Several parameter and initializer lines are missing from this dump.
2550 HConstant::HConstant(Unique<Object> unique,
2553 bool is_internalize_string,
2554 bool is_not_in_new_space,
2557 : HTemplateInstruction<0>(type),
2559 has_smi_value_(false),
2560 has_int32_value_(false),
2561 has_double_value_(false),
2562 has_external_reference_value_(false),
2563 is_internalized_string_(is_internalize_string),
2564 is_not_in_new_space_(is_not_in_new_space),
2566 boolean_value_(boolean_value) {
2567 ASSERT(!unique.handle().is_null());
// Tagged numbers must go through the numeric constructors instead.
2568 ASSERT(!type.IsTaggedNumber());
// Constructs an integer constant: always has an exact double view via
// FastI2D, is a smi when the value fits, and is truthy iff non-zero.
2573 HConstant::HConstant(int32_t integer_value,
2575 bool is_not_in_new_space,
2576 Unique<Object> object)
2578 has_smi_value_(Smi::IsValid(integer_value)),
2579 has_int32_value_(true),
2580 has_double_value_(true),
2581 has_external_reference_value_(false),
2582 is_internalized_string_(false),
2583 is_not_in_new_space_(is_not_in_new_space),
2585 boolean_value_(integer_value != 0),
2586 int32_value_(integer_value),
2587 double_value_(FastI2D(integer_value)) {
2588 set_type(has_smi_value_ ? HType::Smi() : HType::TaggedNumber());
// Constructs a double constant; it additionally carries an int32/smi view
// when the double round-trips exactly. Truthiness follows ToBoolean for
// numbers: false only for 0 and NaN.
2593 HConstant::HConstant(double double_value,
2595 bool is_not_in_new_space,
2596 Unique<Object> object)
2598 has_int32_value_(IsInteger32(double_value)),
2599 has_double_value_(true),
2600 has_external_reference_value_(false),
2601 is_internalized_string_(false),
2602 is_not_in_new_space_(is_not_in_new_space),
2604 boolean_value_(double_value != 0 && !std::isnan(double_value)),
2605 int32_value_(DoubleToInt32(double_value)),
2606 double_value_(double_value) {
2607 has_smi_value_ = has_int32_value_ && Smi::IsValid(int32_value_);
2608 set_type(has_smi_value_ ? HType::Smi() : HType::TaggedNumber());
// Constructs a constant wrapping an external (C++) address; it has no heap
// object, no numeric views, and is always treated as truthy.
2613 HConstant::HConstant(ExternalReference reference)
2614 : HTemplateInstruction<0>(HType::None()),
2615 object_(Unique<Object>(Handle<Object>::null())),
2616 has_smi_value_(false),
2617 has_int32_value_(false),
2618 has_double_value_(false),
2619 has_external_reference_value_(true),
2620 is_internalized_string_(false),
2621 is_not_in_new_space_(true),
2623 boolean_value_(true),
2624 external_reference_value_(reference) {
2625 Initialize(Representation::External());
// Finalizes construction: picks the narrowest representation the cached
// value views allow (the guard for the incoming |r|, likely r.IsNone(), is
// on a line missing from this dump), eagerly migrates deprecated-map
// JSObjects, and installs the chosen representation.
2629 void HConstant::Initialize(Representation r) {
// Smis are only chosen on 31-bit-smi configurations.
2631 if (has_smi_value_ && SmiValuesAre31Bits()) {
2632 r = Representation::Smi();
2633 } else if (has_int32_value_) {
2634 r = Representation::Integer32();
2635 } else if (has_double_value_) {
2636 r = Representation::Double();
2637 } else if (has_external_reference_value_) {
2638 r = Representation::External();
2640 Handle<Object> object = object_.handle();
2641 if (object->IsJSObject()) {
2642 // Try to eagerly migrate JSObjects that have deprecated maps.
2643 Handle<JSObject> js_object = Handle<JSObject>::cast(object);
2644 if (js_object->map()->is_deprecated()) {
2645 JSObject::TryMigrateInstance(js_object);
// Fallback for anything without a narrower view.
2648 r = Representation::Tagged();
2651 set_representation(r);
// Whether this constant refers to an object the GC never moves or collects
// (immortal-immovable roots, internalized-string roots, string maps). The
// return statements for the numeric/external early-outs and the final
// root-list expression are on lines missing from this dump.
2656 bool HConstant::ImmortalImmovable() const {
2657 if (has_int32_value_) {
2660 if (has_double_value_) {
2661 if (IsSpecialDouble()) {
2666 if (has_external_reference_value_) {
2670 ASSERT(!object_.handle().is_null());
2671 Heap* heap = isolate()->heap();
// -0 and NaN are "special doubles" and must have been handled above.
2672 ASSERT(!object_.IsKnownGlobal(heap->minus_zero_value()));
2673 ASSERT(!object_.IsKnownGlobal(heap->nan_value()));
// Each macro expands to one "is this known root?" disjunct.
2675 #define IMMORTAL_IMMOVABLE_ROOT(name) \
2676 object_.IsKnownGlobal(heap->name()) ||
2677 IMMORTAL_IMMOVABLE_ROOT_LIST(IMMORTAL_IMMOVABLE_ROOT)
2678 #undef IMMORTAL_IMMOVABLE_ROOT
2679 #define INTERNALIZED_STRING(name, value) \
2680 object_.IsKnownGlobal(heap->name()) ||
2681 INTERNALIZED_STRING_LIST(INTERNALIZED_STRING)
2682 #undef INTERNALIZED_STRING
2683 #define STRING_TYPE(NAME, size, name, Name) \
2684 object_.IsKnownGlobal(heap->name##_map()) ||
2685 STRING_TYPE_LIST(STRING_TYPE)
// Decides whether to rematerialize this constant at each use instead of
// emitting it once: unused constants and (under OSR) standard constants
// qualify; cells and doubles never do. Final return lines are missing here.
2691 bool HConstant::EmitAtUses() {
2693 if (block()->graph()->has_osr() &&
2694 block()->graph()->IsStandardConstant(this)) {
2695 // TODO(titzer): this seems like a hack that should be fixed by custom OSR.
2698 if (UseCount() == 0) return true;
2699 if (IsCell()) return false;
2700 if (representation().IsDouble()) return false;
// Clones this constant in representation |r|, or returns NULL when the
// constant has no value view compatible with |r|. Dispatches to the
// narrowest available constructor (int32 before double before external).
2705 HConstant* HConstant::CopyToRepresentation(Representation r, Zone* zone) const {
2706 if (r.IsSmi() && !has_smi_value_) return NULL;
2707 if (r.IsInteger32() && !has_int32_value_) return NULL;
2708 if (r.IsDouble() && !has_double_value_) return NULL;
2709 if (r.IsExternal() && !has_external_reference_value_) return NULL;
2710 if (has_int32_value_) {
2711 return new(zone) HConstant(int32_value_, r, is_not_in_new_space_, object_);
2713 if (has_double_value_) {
2714 return new(zone) HConstant(double_value_, r, is_not_in_new_space_, object_);
2716 if (has_external_reference_value_) {
2717 return new(zone) HConstant(external_reference_value_);
2719 ASSERT(!object_.handle().is_null());
// Generic heap-object fallback (some constructor arguments are on lines
// missing from this dump).
2720 return new(zone) HConstant(object_,
2723 is_internalized_string_,
2724 is_not_in_new_space_,
// Produces an Integer32 copy of this constant using JS truncation semantics
// (DoubleToInt32) for double-valued constants; the Maybe is empty when the
// constant is neither int32- nor double-valued.
2730 Maybe<HConstant*> HConstant::CopyToTruncatedInt32(Zone* zone) {
2731 HConstant* res = NULL;
2732 if (has_int32_value_) {
2733 res = new(zone) HConstant(int32_value_,
2734 Representation::Integer32(),
2735 is_not_in_new_space_,
2737 } else if (has_double_value_) {
2738 res = new(zone) HConstant(DoubleToInt32(double_value_),
2739 Representation::Integer32(),
2740 is_not_in_new_space_,
2743 return Maybe<HConstant*>(res != NULL, res);
// Produces a numeric copy following ToNumber for the oddball constants:
// true -> 1, false -> 0, undefined -> NaN, null -> 0. Empty Maybe for
// anything else.
2747 Maybe<HConstant*> HConstant::CopyToTruncatedNumber(Zone* zone) {
2748 HConstant* res = NULL;
2749 Handle<Object> handle = this->handle(zone->isolate());
2750 if (handle->IsBoolean()) {
2751 res = handle->BooleanValue() ?
2752 new(zone) HConstant(1) : new(zone) HConstant(0);
2753 } else if (handle->IsUndefined()) {
2754 res = new(zone) HConstant(OS::nan_value());
2755 } else if (handle->IsNull()) {
2756 res = new(zone) HConstant(0);
2758 return Maybe<HConstant*>(res != NULL, res);
// Debug printing: chooses the narrowest value view (int32, double, external
// address, else the heap object's ShortPrint) and flags new-space residency.
2762 void HConstant::PrintDataTo(StringStream* stream) {
2763 if (has_int32_value_) {
2764 stream->Add("%d ", int32_value_);
2765 } else if (has_double_value_) {
2766 stream->Add("%f ", FmtElm(double_value_));
2767 } else if (has_external_reference_value_) {
2768 stream->Add("%p ", reinterpret_cast<void*>(
2769 external_reference_value_.address()));
2771 handle(Isolate::Current())->ShortPrint(stream);
2773 if (!is_not_in_new_space_) {
2774 stream->Add("[new space] ");
// Debug printing: both operands plus "!" for possible overflow and "-0?"
// when a minus-zero result must trigger a bailout.
2779 void HBinaryOperation::PrintDataTo(StringStream* stream) {
2780 left()->PrintNameTo(stream);
2782 right()->PrintNameTo(stream);
2783 if (CheckFlag(kCanOverflow)) stream->Add(" !");
2784 if (CheckFlag(kBailoutOnMinusZero)) stream->Add(" -0?");
// Representation inference for binary ops: start from the inputs, widen smi
// to int32 when any use needs more than a smi, then fold in either the uses
// (when no output representation was observed) or the observed output.
2788 void HBinaryOperation::InferRepresentation(HInferRepresentationPhase* h_infer) {
2789 ASSERT(CheckFlag(kFlexibleRepresentation));
2790 Representation new_rep = RepresentationFromInputs();
2791 UpdateRepresentation(new_rep, h_infer, "inputs");
2793 if (representation().IsSmi() && HasNonSmiUse()) {
2794 UpdateRepresentation(
2795 Representation::Integer32(), h_infer, "use requirements");
2798 if (observed_output_representation_.IsNone()) {
2799 new_rep = RepresentationFromUses();
2800 UpdateRepresentation(new_rep, h_infer, "uses");
2802 new_rep = RepresentationFromOutput();
2803 UpdateRepresentation(new_rep, h_infer, "output");
// Generalizes the current representation over both observed input
// representations and over the operands' actual (non-Tagged) representations.
2808 Representation HBinaryOperation::RepresentationFromInputs() {
2809 // Determine the worst case of observed input representations and
2810 // the currently assumed output representation.
2811 Representation rep = representation();
2812 for (int i = 1; i <= 2; ++i) {
2813 rep = rep.generalize(observed_input_representation(i));
2815 // If any of the actual input representation is more general than what we
2816 // have so far but not Tagged, use that representation instead.
2817 Representation left_rep = left()->representation();
2818 Representation right_rep = right()->representation();
2819 if (!left_rep.IsTagged()) rep = rep.generalize(left_rep);
2820 if (!right_rep.IsTagged()) rep = rep.generalize(right_rep);
// The observed output representation can be ignored when all uses truncate
// to the current (smi/int32) representation anyway — except for Mul, where
// only the multiply-by-minus-one case is safe to keep narrow.
2826 bool HBinaryOperation::IgnoreObservedOutputRepresentation(
2827 Representation current_rep) {
2828 return ((current_rep.IsInteger32() && CheckUsesForFlag(kTruncatingToInt32)) ||
2829 (current_rep.IsSmi() && CheckUsesForFlag(kTruncatingToSmi))) &&
2830 // Mul in Integer32 mode would be too precise.
2831 (!this->IsMul() || HMul::cast(this)->MulMinusOne());
// Returns the observed output representation when it is strictly more
// general than the current one and not ignorable (see
// IgnoreObservedOutputRepresentation); otherwise None (no constraint).
2835 Representation HBinaryOperation::RepresentationFromOutput() {
2836 Representation rep = representation();
2837 // Consider observed output representation, but ignore it if it's Double,
2838 // this instruction is not a division, and all its uses are truncating
2840 if (observed_output_representation_.is_more_general_than(rep) &&
2841 !IgnoreObservedOutputRepresentation(rep)) {
2842 return observed_output_representation_;
2844 return Representation::None();
// Forces representation |r| on this operation and records it as the
// observed representation of both inputs.
2848 void HBinaryOperation::AssumeRepresentation(Representation r) {
2849 set_observed_input_representation(1, r);
2850 set_observed_input_representation(2, r);
2851 HValue::AssumeRepresentation(r);
// min/max only constrain their representation by the inputs; uses are
// deliberately not consulted.
2855 void HMathMinMax::InferRepresentation(HInferRepresentationPhase* h_infer) {
2856 ASSERT(CheckFlag(kFlexibleRepresentation));
2857 Representation new_rep = RepresentationFromInputs();
2858 UpdateRepresentation(new_rep, h_infer, "inputs");
2859 // Do not care about uses.
// Range inference for bitwise ops. XOR: bound the result by the highest bit
// that can differ between the operands (operand bounds are complemented when
// negative so the bit scan works on non-negative patterns). AND/OR: combine
// the operands' bit masks; a non-negative combined mask bounds the result.
// Some interior lines (the XOR limit computation, early returns, mask
// defaults) are missing from this dump.
2863 Range* HBitwise::InferRange(Zone* zone) {
2864 if (op() == Token::BIT_XOR) {
2865 if (left()->HasRange() && right()->HasRange()) {
2866 // The maximum value has the high bit, and all bits below, set:
2868 // If the range can be negative, the minimum int is a negative number with
2869 // the high bit, and all bits below, unset:
2871 // If it cannot be negative, conservatively choose 0 as minimum int.
2872 int64_t left_upper = left()->range()->upper();
2873 int64_t left_lower = left()->range()->lower();
2874 int64_t right_upper = right()->range()->upper();
2875 int64_t right_lower = right()->range()->lower();
// Fold negative bounds to their complement so a single MostSignificantBit
// scan covers both signs.
2877 if (left_upper < 0) left_upper = ~left_upper;
2878 if (left_lower < 0) left_lower = ~left_lower;
2879 if (right_upper < 0) right_upper = ~right_upper;
2880 if (right_lower < 0) right_lower = ~right_lower;
2882 int high = MostSignificantBit(
2883 static_cast<uint32_t>(
2884 left_upper | left_lower | right_upper | right_lower));
2888 int32_t min = (left()->range()->CanBeNegative() ||
2889 right()->range()->CanBeNegative())
2890 ? static_cast<int32_t>(-limit) : 0;
2891 return new(zone) Range(min, static_cast<int32_t>(limit - 1));
2893 Range* result = HValue::InferRange(zone);
// Bitwise results are int32, never -0.
2894 result->set_can_be_minus_zero(false);
2897 const int32_t kDefaultMask = static_cast<int32_t>(0xffffffff);
2898 int32_t left_mask = (left()->range() != NULL)
2899 ? left()->range()->Mask()
2901 int32_t right_mask = (right()->range() != NULL)
2902 ? right()->range()->Mask()
2904 int32_t result_mask = (op() == Token::BIT_AND)
2905 ? left_mask & right_mask
2906 : left_mask | right_mask;
2907 if (result_mask >= 0) return new(zone) Range(0, result_mask);
2909 Range* result = HValue::InferRange(zone);
2910 result->set_can_be_minus_zero(false);
// Arithmetic-shift-right range: with a constant shift amount, shift the
// left operand's range (or an unconstrained one); otherwise fall back to
// the generic inference.
2915 Range* HSar::InferRange(Zone* zone) {
2916 if (right()->IsConstant()) {
2917 HConstant* c = HConstant::cast(right());
2918 if (c->HasInteger32Value()) {
2919 Range* result = (left()->range() != NULL)
2920 ? left()->range()->Copy(zone)
2921 : new(zone) Range();
2922 result->Sar(c->Integer32Value());
2926 return HValue::InferRange(zone);
// Logical-shift-right range: with a constant shift amount (masked to 5 bits
// per JS semantics), a possibly-negative input yields [0, 0xffffffff >> n]
// when n >= 1 (so the result fits in int32); non-negative inputs behave like
// an arithmetic shift of the input range.
2930 Range* HShr::InferRange(Zone* zone) {
2931 if (right()->IsConstant()) {
2932 HConstant* c = HConstant::cast(right());
2933 if (c->HasInteger32Value()) {
2934 int shift_count = c->Integer32Value() & 0x1f;
2935 if (left()->range()->CanBeNegative()) {
2936 // Only compute bounds if the result always fits into an int32.
2937 return (shift_count >= 1)
2938 ? new(zone) Range(0,
2939 static_cast<uint32_t>(0xffffffff) >> shift_count)
2940 : new(zone) Range();
2942 // For positive inputs we can use the >> operator.
2943 Range* result = (left()->range() != NULL)
2944 ? left()->range()->Copy(zone)
2945 : new(zone) Range();
// Sar and Shr agree on non-negative inputs.
2946 result->Sar(c->Integer32Value());
2951 return HValue::InferRange(zone);
// Shift-left range: with a constant shift amount, shift the left operand's
// range; otherwise fall back to the generic inference.
2955 Range* HShl::InferRange(Zone* zone) {
2956 if (right()->IsConstant()) {
2957 HConstant* c = HConstant::cast(right());
2958 if (c->HasInteger32Value()) {
2959 Range* result = (left()->range() != NULL)
2960 ? left()->range()->Copy(zone)
2961 : new(zone) Range();
2962 result->Shl(c->Integer32Value());
2966 return HValue::InferRange(zone);
// Field loads with a narrow declared representation (8/16-bit signed or
// unsigned) or string-length loads get a tight range; everything else uses
// the generic inference.
2970 Range* HLoadNamedField::InferRange(Zone* zone) {
2971 if (access().representation().IsInteger8()) {
2972 return new(zone) Range(kMinInt8, kMaxInt8);
2974 if (access().representation().IsUInteger8()) {
2975 return new(zone) Range(kMinUInt8, kMaxUInt8);
2977 if (access().representation().IsInteger16()) {
2978 return new(zone) Range(kMinInt16, kMaxInt16);
2980 if (access().representation().IsUInteger16()) {
2981 return new(zone) Range(kMinUInt16, kMaxUInt16);
2983 if (access().IsStringLength()) {
2984 return new(zone) Range(0, String::kMaxLength);
2986 return HValue::InferRange(zone);
// Keyed loads from 8/16-bit external (typed-array) backing stores get a
// tight range matching the element type; other kinds use the generic
// inference (the default: label line is missing from this dump).
2990 Range* HLoadKeyed::InferRange(Zone* zone) {
2991 switch (elements_kind()) {
2992 case EXTERNAL_INT8_ELEMENTS:
2993 return new(zone) Range(kMinInt8, kMaxInt8);
2994 case EXTERNAL_UINT8_ELEMENTS:
2995 case EXTERNAL_UINT8_CLAMPED_ELEMENTS:
2996 return new(zone) Range(kMinUInt8, kMaxUInt8);
2997 case EXTERNAL_INT16_ELEMENTS:
2998 return new(zone) Range(kMinInt16, kMaxInt16);
2999 case EXTERNAL_UINT16_ELEMENTS:
3000 return new(zone) Range(kMinUInt16, kMaxUInt16);
3002 return HValue::InferRange(zone);
// Debug printing: the comparison token, then the binary operands.
3007 void HCompareGeneric::PrintDataTo(StringStream* stream) {
3008 stream->Add(Token::Name(token()));
3010 HBinaryOperation::PrintDataTo(stream);
// Debug printing: the comparison token, then the branch targets.
3014 void HStringCompareAndBranch::PrintDataTo(StringStream* stream) {
3015 stream->Add(Token::Name(token()));
3017 HControlInstruction::PrintDataTo(stream);
// Debug printing: token, both operands, then the branch targets.
3021 void HCompareNumericAndBranch::PrintDataTo(StringStream* stream) {
3022 stream->Add(Token::Name(token()));
3024 left()->PrintNameTo(stream);
3026 right()->PrintNameTo(stream);
3027 HControlInstruction::PrintDataTo(stream);
// Debug printing: both operands, then the branch targets.
3031 void HCompareObjectEqAndBranch::PrintDataTo(StringStream* stream) {
3032 left()->PrintNameTo(stream);
3034 right()->PrintNameTo(stream);
3035 HControlInstruction::PrintDataTo(stream);
// Folds the branch when both operands are constants: the successor is known
// statically from their identity comparison. The true-successor arm of the
// ternary and the final returns are on lines missing from this dump.
3039 bool HCompareObjectEqAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
3040 if (left()->IsConstant() && right()->IsConstant()) {
3041 bool comparison_result =
3042 HConstant::cast(left())->Equals(HConstant::cast(right()));
3043 *block = comparison_result
3045 : SecondSuccessor();
// The hole check simply adopts its input's representation.
3053 void HCompareHoleAndBranch::InferRepresentation(
3054 HInferRepresentationPhase* h_infer) {
3055 ChangeRepresentation(value()->representation());
// A smi or int32 input can never hold -0, so the "not minus zero" successor
// is known statically (remaining returns are missing from this dump).
3059 bool HCompareMinusZeroAndBranch::KnownSuccessorBlock(HBasicBlock** block) {
3060 if (value()->representation().IsSmiOrInteger32()) {
3061 // A Smi or Integer32 cannot contain minus zero.
3062 *block = SecondSuccessor();
// The minus-zero check simply adopts its input's representation.
3070 void HCompareMinusZeroAndBranch::InferRepresentation(
3071 HInferRepresentationPhase* h_infer) {
3072 ChangeRepresentation(value()->representation());
// Debug printing: the single successor's block id.
3077 void HGoto::PrintDataTo(StringStream* stream) {
3078 stream->Add("B%d", SuccessorAt(0)->block_id());
// Chooses the comparison representation from the observed input
// representations, refined by the operands' actual non-Tagged
// representations when still smi/int32; otherwise falls back to Double.
// Double comparisons only tolerate undefined-as-NaN for ordered relational
// operators (see the long comment below); equality on doubles must deopt
// on undefined instead.
3082 void HCompareNumericAndBranch::InferRepresentation(
3083 HInferRepresentationPhase* h_infer) {
3084 Representation left_rep = left()->representation();
3085 Representation right_rep = right()->representation();
3086 Representation observed_left = observed_input_representation(0);
3087 Representation observed_right = observed_input_representation(1);
3089 Representation rep = Representation::None();
3090 rep = rep.generalize(observed_left);
3091 rep = rep.generalize(observed_right);
3092 if (rep.IsNone() || rep.IsSmiOrInteger32()) {
3093 if (!left_rep.IsTagged()) rep = rep.generalize(left_rep);
3094 if (!right_rep.IsTagged()) rep = rep.generalize(right_rep);
3096 rep = Representation::Double();
3099 if (rep.IsDouble()) {
3100 // According to the ES5 spec (11.9.3, 11.8.5), Equality comparisons (==, ===
3101 // and !=) have special handling of undefined, e.g. undefined == undefined
3102 // is 'true'. Relational comparisons have a different semantic, first
3103 // calling ToPrimitive() on their arguments. The standard Crankshaft
3104 // tagged-to-double conversion to ensure the HCompareNumericAndBranch's
3105 // inputs are doubles caused 'undefined' to be converted to NaN. That's
3106 // compatible out-of-the box with ordered relational comparisons (<, >, <=,
3107 // >=). However, for equality comparisons (and for 'in' and 'instanceof'),
3108 // it is not consistent with the spec. For example, it would cause undefined
3109 // == undefined (should be true) to be evaluated as NaN == NaN
3110 // (false). Therefore, any comparisons other than ordered relational
3111 // comparisons must cause a deopt when one of their arguments is undefined.
3113 if (Token::IsOrderedRelationalCompareOp(token_)) {
3114 SetFlag(kAllowUndefinedAsNaN);
3117 ChangeRepresentation(rep);
// Debug printing: the parameter index.
3121 void HParameter::PrintDataTo(StringStream* stream) {
3122 stream->Add("%u", index());
// Debug printing: receiver, field access, and the optional dependency value.
3126 void HLoadNamedField::PrintDataTo(StringStream* stream) {
3127 object()->PrintNameTo(stream);
3128 access_.PrintTo(stream);
3130 if (HasDependency()) {
3132 dependency()->PrintNameTo(stream);
// Factory for a single-map check. When the value is a constant already known
// to have |map| and the map allows omitting checks, the check is marked
// omittable and a compilation dependency is registered so a later map
// transition invalidates the code. (Some parameter lines and the final
// return are missing from this dump.)
3137 HCheckMaps* HCheckMaps::New(Zone* zone,
3141 CompilationInfo* info,
3142 HValue* typecheck) {
3143 HCheckMaps* check_map = new(zone) HCheckMaps(value, zone, typecheck);
3144 check_map->Add(map, zone);
3145 if (map->CanOmitMapChecks() &&
3146 value->IsConstant() &&
3147 HConstant::cast(value)->HasMap(map)) {
3148 // TODO(titzer): collect dependent map checks into a list.
3149 check_map->omit_ = true;
3150 if (map->CanTransition()) {
3151 map->AddDependentCompilationInfo(
3152 DependentCode::kPrototypeCheckGroup, info);
// Debug printing: receiver and the property name being loaded.
3159 void HLoadNamedGeneric::PrintDataTo(StringStream* stream) {
3160 object()->PrintNameTo(stream);
3162 stream->Add(String::cast(*name())->ToCString().get());
// Debug printing: elements backing store (annotated with the elements kind
// for external arrays), key, dehoisting offset, optional dependency, and a
// hole-check marker.
3166 void HLoadKeyed::PrintDataTo(StringStream* stream) {
3167 if (!is_external()) {
3168 elements()->PrintNameTo(stream);
3170 ASSERT(elements_kind() >= FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND &&
3171 elements_kind() <= LAST_EXTERNAL_ARRAY_ELEMENTS_KIND);
3172 elements()->PrintNameTo(stream);
3174 stream->Add(ElementsKindToString(elements_kind()));
3178 key()->PrintNameTo(stream);
3179 if (IsDehoisted()) {
3180 stream->Add(" + %d]", index_offset());
3185 if (HasDependency()) {
3187 dependency()->PrintNameTo(stream);
3190 if (RequiresHoleCheck()) {
3191 stream->Add(" check_hole");
// Whether every use of this load copes with reading the hole. Packed and
// external kinds can never produce a hole. With ALLOW_RETURN_HOLE, double
// arrays require all uses to treat the hole as NaN; otherwise holes only
// appear as tagged values, and each use must be an HChange. (Several
// return lines are missing from this dump.)
3196 bool HLoadKeyed::UsesMustHandleHole() const {
3197 if (IsFastPackedElementsKind(elements_kind())) {
3201 if (IsExternalArrayElementsKind(elements_kind())) {
3205 if (hole_mode() == ALLOW_RETURN_HOLE) {
3206 if (IsFastDoubleElementsKind(elements_kind())) {
3207 return AllUsesCanTreatHoleAsNaN();
3212 if (IsFastDoubleElementsKind(elements_kind())) {
3216 // Holes are only returned as tagged values.
3217 if (!representation().IsTagged()) {
3221 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
3222 HValue* use = it.value();
3223 if (!use->IsChange()) return false;
// True for double-array loads whose every use tolerates undefined-as-NaN,
// letting the hole flow through as a NaN instead of forcing a deopt.
3230 bool HLoadKeyed::AllUsesCanTreatHoleAsNaN() const {
3231 return IsFastDoubleElementsKind(elements_kind()) &&
3232 CheckUsesForFlag(HValue::kAllowUndefinedAsNaN);
// A hole check is needed unless the backing store cannot contain holes
// (packed or external kinds — their return lines are missing from this
// dump) or every use already handles the hole.
3236 bool HLoadKeyed::RequiresHoleCheck() const {
3237 if (IsFastPackedElementsKind(elements_kind())) {
3241 if (IsExternalArrayElementsKind(elements_kind())) {
3245 return !UsesMustHandleHole();
// Debug printing: receiver and key.
3249 void HLoadKeyedGeneric::PrintDataTo(StringStream* stream) {
3250 object()->PrintNameTo(stream);
3252 key()->PrintNameTo(stream);
// Strength reduction for the for-in pattern: a generic keyed load whose key
// comes from a for-in name cache over the same enumerable object is rewritten
// into a map check + index-cache load + fast HLoadFieldByIndex. Several
// argument lines and the fallback return are missing from this dump.
3257 HValue* HLoadKeyedGeneric::Canonicalize() {
3258 // Recognize generic keyed loads that use property name generated
3259 // by for-in statement as a key and rewrite them into fast property load
3261 if (key()->IsLoadKeyed()) {
3262 HLoadKeyed* key_load = HLoadKeyed::cast(key());
3263 if (key_load->elements()->IsForInCacheArray()) {
3264 HForInCacheArray* names_cache =
3265 HForInCacheArray::cast(key_load->elements());
// Only safe when the cache was built for this exact receiver.
3267 if (names_cache->enumerable() == object()) {
3268 HForInCacheArray* index_cache =
3269 names_cache->index_cache();
3270 HCheckMapValue* map_check =
3271 HCheckMapValue::New(block()->graph()->zone(),
3272 block()->graph()->GetInvalidContext(),
3274 names_cache->map());
3275 HInstruction* index = HLoadKeyed::New(
3276 block()->graph()->zone(),
3277 block()->graph()->GetInvalidContext(),
3281 key_load->elements_kind());
3282 map_check->InsertBefore(this);
3283 index->InsertBefore(this);
3284 return Prepend(new(block()->zone()) HLoadFieldByIndex(
// Debug printing: receiver, property name, and the stored value.
3294 void HStoreNamedGeneric::PrintDataTo(StringStream* stream) {
3295 object()->PrintNameTo(stream);
3297 ASSERT(name()->IsString());
3298 stream->Add(String::cast(*name())->ToCString().get());
3300 value()->PrintNameTo(stream);
// Debug printing: receiver, field access, value, plus write-barrier and
// map-transition annotations.
3304 void HStoreNamedField::PrintDataTo(StringStream* stream) {
3305 object()->PrintNameTo(stream);
3306 access_.PrintTo(stream);
3308 value()->PrintNameTo(stream);
3309 if (NeedsWriteBarrier()) {
3310 stream->Add(" (write-barrier)");
3312 if (has_transition()) {
3313 stream->Add(" (transition map %p)", *transition_map());
// Debug printing: elements backing store (annotated with the elements kind
// for external arrays), key, dehoisting offset, and the stored value.
3318 void HStoreKeyed::PrintDataTo(StringStream* stream) {
3319 if (!is_external()) {
3320 elements()->PrintNameTo(stream);
3322 elements()->PrintNameTo(stream);
3324 stream->Add(ElementsKindToString(elements_kind()));
3325 ASSERT(elements_kind() >= FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND &&
3326 elements_kind() <= LAST_EXTERNAL_ARRAY_ELEMENTS_KIND);
3330 key()->PrintNameTo(stream);
3331 if (IsDehoisted()) {
3332 stream->Add(" + %d] = ", index_offset());
3334 stream->Add("] = ");
3337 value()->PrintNameTo(stream);
// Debug printing: receiver, key, and the stored value.
3341 void HStoreKeyedGeneric::PrintDataTo(StringStream* stream) {
3342 object()->PrintNameTo(stream);
3344 key()->PrintNameTo(stream);
3345 stream->Add("] = ");
3346 value()->PrintNameTo(stream);
// Debug printing: receiver plus the source and target maps with their
// elements kinds; marks transitions that need no element conversion.
3350 void HTransitionElementsKind::PrintDataTo(StringStream* stream) {
3351 object()->PrintNameTo(stream);
3352 ElementsKind from_kind = original_map().handle()->elements_kind();
3353 ElementsKind to_kind = transitioned_map().handle()->elements_kind();
3354 stream->Add(" %p [%s] -> %p [%s]",
3355 *original_map().handle(),
3356 ElementsAccessor::ForKind(from_kind)->name(),
3357 *transitioned_map().handle(),
3358 ElementsAccessor::ForKind(to_kind)->name());
3359 if (IsSimpleMapChangeTransition(from_kind, to_kind)) stream->Add(" (simple)");
// Debug printing: cell address plus deletability/read-only property details.
3363 void HLoadGlobalCell::PrintDataTo(StringStream* stream) {
3364 stream->Add("[%p]", *cell().handle());
3365 if (!details_.IsDontDelete()) stream->Add(" (deleteable)");
3366 if (details_.IsReadOnly()) stream->Add(" (read-only)");
// A hole check is unnecessary for non-deletable, writable cells; otherwise
// it is required as soon as any use is not an HChange. (The final return
// line is missing from this dump.)
3370 bool HLoadGlobalCell::RequiresHoleCheck() const {
3371 if (details_.IsDontDelete() && !details_.IsReadOnly()) return false;
3372 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
3373 HValue* use = it.value();
3374 if (!use->IsChange()) return true;
// Debug printing: the global property name.
3380 void HLoadGlobalGeneric::PrintDataTo(StringStream* stream) {
3381 stream->Add("%o ", *name());
// Debug printing: base allocation and the inner byte offset.
3385 void HInnerAllocatedObject::PrintDataTo(StringStream* stream) {
3386 base_object()->PrintNameTo(stream);
3387 stream->Add(" offset %d", offset());
// Debug printing: cell address, stored value, and property details.
3391 void HStoreGlobalCell::PrintDataTo(StringStream* stream) {
3392 stream->Add("[%p] = ", *cell().handle());
3393 value()->PrintNameTo(stream);
3394 if (!details_.IsDontDelete()) stream->Add(" (deleteable)");
3395 if (details_.IsReadOnly()) stream->Add(" (read-only)");
// Debug printing: context value and slot index.
3399 void HLoadContextSlot::PrintDataTo(StringStream* stream) {
3400 value()->PrintNameTo(stream);
3401 stream->Add("[%d]", slot_index());
// Debug printing: context, slot index, and the stored value.
3405 void HStoreContextSlot::PrintDataTo(StringStream* stream) {
3406 context()->PrintNameTo(stream);
3407 stream->Add("[%d] = ", slot_index());
3408 value()->PrintNameTo(stream);
3412 // Implementation of type inference and type conversions. Calculates
3413 // the inferred type of this instruction based on the input operands.
// Default implementation; its body (presumably returning type_) is on lines
// missing from this dump.
3415 HType HValue::CalculateInferredType() {
// A phi's type is the combination (least upper bound) of all operand types;
// an operand-less phi is conservatively Tagged.
3420 HType HPhi::CalculateInferredType() {
3421 if (OperandCount() == 0) return HType::Tagged();
3422 HType result = OperandAt(0)->type();
3423 for (int i = 1; i < OperandCount(); ++i) {
3424 HType current = OperandAt(i)->type();
3425 result = result.Combine(current);
// A double-to-tagged change boxes into a HeapNumber; the fallback return
// for other conversions is on a line missing from this dump.
3431 HType HChange::CalculateInferredType() {
3432 if (from().IsDouble() && to().IsTagged()) return HType::HeapNumber();
// Generalizes the current representation over the (non-Tagged) input
// representation; the final return is on a line missing from this dump.
3437 Representation HUnaryMathOperation::RepresentationFromInputs() {
3438 Representation rep = representation();
3439 // If any of the actual input representation is more general than what we
3440 // have so far but not Tagged, use that representation instead.
3441 Representation input_rep = value()->representation();
3442 if (!input_rep.IsTagged()) {
3443 rep = rep.generalize(input_rep);
// Allocation folding: tries to merge this allocation into its dominating
// allocation by growing the dominator's size and replacing this instruction
// with an HInnerAllocatedObject at the old end of the dominator. Bails out
// (with optional tracing) when folding is disabled, the dominator is not an
// allocate, either size is non-constant, no foldable dominator exists, or
// the combined size exceeds the regular-heap-object limit. Several interior
// lines (early returns, argument lists, new-space memento handling branch
// structure) are missing from this dump.
3449 bool HAllocate::HandleSideEffectDominator(GVNFlag side_effect,
3450 HValue* dominator) {
3451 ASSERT(side_effect == kChangesNewSpacePromotion);
3452 Zone* zone = block()->zone();
3453 if (!FLAG_use_allocation_folding) return false;
3455 // Try to fold allocations together with their dominating allocations.
3456 if (!dominator->IsAllocate()) {
3457 if (FLAG_trace_allocation_folding) {
3458 PrintF("#%d (%s) cannot fold into #%d (%s)\n",
3459 id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
3464 HAllocate* dominator_allocate = HAllocate::cast(dominator);
3465 HValue* dominator_size = dominator_allocate->size();
3466 HValue* current_size = size();
3468 // TODO(hpayer): Add support for non-constant allocation in dominator.
3469 if (!current_size->IsInteger32Constant() ||
3470 !dominator_size->IsInteger32Constant()) {
3471 if (FLAG_trace_allocation_folding) {
3472 PrintF("#%d (%s) cannot fold into #%d (%s), dynamic allocation size\n",
3473 id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
// May hoist past the immediate dominator for old-space allocations.
3478 dominator_allocate = GetFoldableDominator(dominator_allocate);
3479 if (dominator_allocate == NULL) {
// Folding never mixes allocation spaces.
3483 ASSERT((IsNewSpaceAllocation() &&
3484 dominator_allocate->IsNewSpaceAllocation()) ||
3485 (IsOldDataSpaceAllocation() &&
3486 dominator_allocate->IsOldDataSpaceAllocation()) ||
3487 (IsOldPointerSpaceAllocation() &&
3488 dominator_allocate->IsOldPointerSpaceAllocation()));
3490 // First update the size of the dominator allocate instruction.
3491 dominator_size = dominator_allocate->size();
3492 int32_t original_object_size =
3493 HConstant::cast(dominator_size)->GetInteger32Constant();
3494 int32_t dominator_size_constant = original_object_size;
3495 int32_t current_size_constant =
3496 HConstant::cast(current_size)->GetInteger32Constant();
3497 int32_t new_dominator_size = dominator_size_constant + current_size_constant;
3499 if (MustAllocateDoubleAligned()) {
3500 if (!dominator_allocate->MustAllocateDoubleAligned()) {
3501 dominator_allocate->MakeDoubleAligned();
// Pad so this (double-aligned) chunk starts on an aligned offset.
3503 if ((dominator_size_constant & kDoubleAlignmentMask) != 0) {
3504 dominator_size_constant += kDoubleSize / 2;
3505 new_dominator_size += kDoubleSize / 2;
3509 // Since we clear the first word after folded memory, we cannot use the
3510 // whole Page::kMaxRegularHeapObjectSize memory.
3511 if (new_dominator_size > Page::kMaxRegularHeapObjectSize - kPointerSize) {
3512 if (FLAG_trace_allocation_folding) {
3513 PrintF("#%d (%s) cannot fold into #%d (%s) due to size: %d\n",
3514 id(), Mnemonic(), dominator_allocate->id(),
3515 dominator_allocate->Mnemonic(), new_dominator_size);
3520 HInstruction* new_dominator_size_constant = HConstant::CreateAndInsertBefore(
3524 Representation::None(),
3525 dominator_allocate);
3526 dominator_allocate->UpdateSize(new_dominator_size_constant);
3529 if (FLAG_verify_heap && dominator_allocate->IsNewSpaceAllocation()) {
3530 dominator_allocate->MakePrefillWithFiller();
3532 // TODO(hpayer): This is a short-term hack to make allocation mementos
3533 // work again in new space.
3534 dominator_allocate->ClearNextMapWord(original_object_size);
3537 // TODO(hpayer): This is a short-term hack to make allocation mementos
3538 // work again in new space.
3539 dominator_allocate->ClearNextMapWord(original_object_size);
3542 dominator_allocate->UpdateClearNextMapWord(MustClearNextMapWord());
3544 // After that replace the dominated allocate instruction.
3545 HInstruction* inner_offset = HConstant::CreateAndInsertBefore(
3548 dominator_size_constant,
3549 Representation::None(),
3552 HInstruction* dominated_allocate_instr =
3553 HInnerAllocatedObject::New(zone,
3558 dominated_allocate_instr->InsertBefore(this);
3559 DeleteAndReplaceWith(dominated_allocate_instr);
3560 if (FLAG_trace_allocation_folding) {
3561 PrintF("#%d (%s) folded into #%d (%s)\n",
3562 id(), Mnemonic(), dominator_allocate->id(),
3563 dominator_allocate->Mnemonic());
// When |dominator| is not directly foldable (different space), tries to
// hoist this old-space allocation over it to the dominator's own dominating
// allocation: never across a new-space allocation, only within the same
// basic block, and only between the two old spaces. Installs or updates the
// free-space filler that keeps the heap iterable across a possible GC at
// |dominator|. Returns the allocation to fold into, or NULL (the NULL
// return lines and the trailing "return dominator" are missing from this
// dump).
3569 HAllocate* HAllocate::GetFoldableDominator(HAllocate* dominator) {
3570 if (!IsFoldable(dominator)) {
3571 // We cannot hoist old space allocations over new space allocations.
3572 if (IsNewSpaceAllocation() || dominator->IsNewSpaceAllocation()) {
3573 if (FLAG_trace_allocation_folding) {
3574 PrintF("#%d (%s) cannot fold into #%d (%s), new space hoisting\n",
3575 id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
3580 HAllocate* dominator_dominator = dominator->dominating_allocate_;
3582 // We can hoist old data space allocations over an old pointer space
3583 // allocation and vice versa. For that we have to check the dominator
3584 // of the dominator allocate instruction.
3585 if (dominator_dominator == NULL) {
// Remember |dominator| so a later allocation can hoist over it.
3586 dominating_allocate_ = dominator;
3587 if (FLAG_trace_allocation_folding) {
3588 PrintF("#%d (%s) cannot fold into #%d (%s), different spaces\n",
3589 id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
3594 // We can just fold old space allocations that are in the same basic block,
3595 // since it is not guaranteed that we fill up the whole allocated old
3597 // TODO(hpayer): Remove this limitation and add filler maps for each each
3598 // allocation as soon as we have store elimination.
3599 if (block()->block_id() != dominator_dominator->block()->block_id()) {
3600 if (FLAG_trace_allocation_folding) {
3601 PrintF("#%d (%s) cannot fold into #%d (%s), different basic blocks\n",
3602 id(), Mnemonic(), dominator_dominator->id(),
3603 dominator_dominator->Mnemonic());
3608 ASSERT((IsOldDataSpaceAllocation() &&
3609 dominator_dominator->IsOldDataSpaceAllocation()) ||
3610 (IsOldPointerSpaceAllocation() &&
3611 dominator_dominator->IsOldPointerSpaceAllocation()));
3613 int32_t current_size = HConstant::cast(size())->GetInteger32Constant();
3614 HStoreNamedField* dominator_free_space_size =
3615 dominator->filler_free_space_size_;
3616 if (dominator_free_space_size != NULL) {
3617 // We already hoisted one old space allocation, i.e., we already installed
3618 // a filler map. Hence, we just have to update the free space size.
3619 dominator->UpdateFreeSpaceFiller(current_size);
3621 // This is the first old space allocation that gets hoisted. We have to
3622 // install a filler map since the follwing allocation may cause a GC.
3623 dominator->CreateFreeSpaceFiller(current_size);
3626 // We can hoist the old space allocation over the actual dominator.
3627 return dominator_dominator;
// Grows the byte count stored by the already-installed free-space filler
// (see CreateFreeSpaceFiller) by `free_space_size`, accounting for one more
// allocation folded over this one. NOTE(review): the argument lines between
// 3639 and 3645 (zone/context) are elided in this listing.
3633 void HAllocate::UpdateFreeSpaceFiller(int32_t free_space_size) {
3634 ASSERT(filler_free_space_size_ != NULL);
3635 Zone* zone = block()->zone();
3636 // We must explicitly force Smi representation here because on x64 we
3637 // would otherwise automatically choose int32, but the actual store
3638 // requires a Smi-tagged value.
3639 HConstant* new_free_space_size = HConstant::CreateAndInsertBefore(
3642 filler_free_space_size_->value()->GetInteger32Constant() +
3644 Representation::Smi(),
3645 filler_free_space_size_);
3646 filler_free_space_size_->UpdateValue(new_free_space_size);
// Installs a FreeSpace filler object over the unused tail of the dominating
// allocation: an inner-allocated object whose map is the free_space_map and
// whose size field is `free_space_size` (Smi-tagged). This keeps the heap
// iterable if a GC happens before the folded allocation's memory is used.
// Records the size store in filler_free_space_size_ so later foldings can
// update it via UpdateFreeSpaceFiller.
3650 void HAllocate::CreateFreeSpaceFiller(int32_t free_space_size) {
3651 ASSERT(filler_free_space_size_ == NULL);
3652 Zone* zone = block()->zone();
3653 HInstruction* free_space_instr =
3654 HInnerAllocatedObject::New(zone, context(), dominating_allocate_,
3655 dominating_allocate_->size(), type());
3656 free_space_instr->InsertBefore(this);
3657 HConstant* filler_map = HConstant::New(
3660 isolate()->factory()->free_space_map());
3661 filler_map->FinalizeUniqueness(); // TODO(titzer): should be init'd a'ready
3662 filler_map->InsertAfter(free_space_instr);
3663 HInstruction* store_map = HStoreNamedField::New(zone, context(),
3664 free_space_instr, HObjectAccess::ForMap(), filler_map);
3665 store_map->SetFlag(HValue::kHasNoObservableSideEffects);
3666 store_map->InsertAfter(filler_map);
3668 // We must explicitly force Smi representation here because on x64 we
3669 // would otherwise automatically choose int32, but the actual store
3670 // requires a Smi-tagged value.
3671 HConstant* filler_size = HConstant::CreateAndInsertAfter(
3672 zone, context(), free_space_size, Representation::Smi(), store_map);
3673 // Must force Smi representation for x64 (see comment above).
3674 HObjectAccess access =
3675 HObjectAccess::ForMapAndOffset(isolate()->factory()->free_space_map(),
3676 FreeSpace::kSizeOffset,
3677 Representation::Smi());
3678 HStoreNamedField* store_size = HStoreNamedField::New(zone, context(),
3679 free_space_instr, access, filler_size);
3680 store_size->SetFlag(HValue::kHasNoObservableSideEffects);
3681 store_size->InsertAfter(filler_size);
3682 filler_free_space_size_ = store_size;
// If required, stores constant 0 into the word at `offset` from this
// allocation (the slot where the next object's map word would be), so a
// heap walk does not misinterpret stale memory as a map pointer.
3686 void HAllocate::ClearNextMapWord(int offset) {
3687 if (MustClearNextMapWord()) {
3688 Zone* zone = block()->zone();
3689 HObjectAccess access =
3690 HObjectAccess::ForObservableJSObjectOffset(offset);
3691 HStoreNamedField* clear_next_map =
3692 HStoreNamedField::New(zone, context(), this, access,
3693 block()->graph()->GetConstant0());
3694 clear_next_map->ClearAllSideEffects();
3695 clear_next_map->InsertAfter(this);
// Prints the allocation size followed by one-letter flags:
// N = new space, P = old pointer space, D = old data space,
// A = double-aligned, F = prefill with filler.
3700 void HAllocate::PrintDataTo(StringStream* stream) {
3701 size()->PrintNameTo(stream);
3703 if (IsNewSpaceAllocation()) stream->Add("N");
3704 if (IsOldPointerSpaceAllocation()) stream->Add("P");
3705 if (IsOldDataSpaceAllocation()) stream->Add("D");
3706 if (MustAllocateDoubleAligned()) stream->Add("A");
3707 if (MustPrefillWithFiller()) stream->Add("F");
// -0 propagation for unary math ops: requests a bailout if the (truncating)
// integer result could hide a -0 input, and — per the elided tail — likely
// propagates the check to the input when representations line up.
// NOTE(review): several lines (visited->Add, returns, braces) are elided.
3712 HValue* HUnaryMathOperation::EnsureAndPropagateNotMinusZero(
3713 BitVector* visited) {
3715 if (representation().IsSmiOrInteger32() &&
3716 !value()->representation().Equals(representation())) {
3717 if (value()->range() == NULL || value()->range()->CanBeMinusZero()) {
3718 SetFlag(kBailoutOnMinusZero);
3721 if (RequiredInputRepresentation(0).IsSmiOrInteger32() &&
3722 representation().Equals(RequiredInputRepresentation(0))) {
// -0 propagation for representation changes: conversions from integral
// representations or truncating-to-int32 changes cannot produce -0, so
// nothing to do; otherwise mark this change to bail out on -0.
3729 HValue* HChange::EnsureAndPropagateNotMinusZero(BitVector* visited) {
3731 if (from().IsSmiOrInteger32()) return NULL;
3732 if (CanTruncateToInt32()) return NULL;
3733 if (value()->range() == NULL || value()->range()->CanBeMinusZero()) {
3734 SetFlag(kBailoutOnMinusZero);
3736 ASSERT(!from().IsSmiOrInteger32() || !to().IsSmiOrInteger32());
// -0 propagation for HForceRepresentation.
// NOTE(review): the body is elided in this listing.
3741 HValue* HForceRepresentation::EnsureAndPropagateNotMinusZero(
3742 BitVector* visited) {
// HMod can produce -0 (e.g. -4 % 2); bail out if the range allows it.
3748 HValue* HMod::EnsureAndPropagateNotMinusZero(BitVector* visited) {
3750 if (range() == NULL || range()->CanBeMinusZero()) {
3751 SetFlag(kBailoutOnMinusZero);
// HDiv can produce -0 (e.g. 0 / -1); bail out if the range allows it.
3758 HValue* HDiv::EnsureAndPropagateNotMinusZero(BitVector* visited) {
3760 if (range() == NULL || range()->CanBeMinusZero()) {
3761 SetFlag(kBailoutOnMinusZero);
// Floor-of-division always guards against -0 (no range check here).
3767 HValue* HMathFloorOfDiv::EnsureAndPropagateNotMinusZero(BitVector* visited) {
3769 SetFlag(kBailoutOnMinusZero);
// HMul can produce -0 (e.g. -1 * 0); bail out if the range allows it.
3774 HValue* HMul::EnsureAndPropagateNotMinusZero(BitVector* visited) {
3776 if (range() == NULL || range()->CanBeMinusZero()) {
3777 SetFlag(kBailoutOnMinusZero);
3783 HValue* HSub::EnsureAndPropagateNotMinusZero(BitVector* visited) {
3785 // Propagate to the left argument. If the left argument cannot be -0, then
3786 // the result of the sub operation cannot be either.
3787 if (range() == NULL || range()->CanBeMinusZero()) {
3794 HValue* HAdd::EnsureAndPropagateNotMinusZero(BitVector* visited) {
3796 // Propagate to the left argument. If the left argument cannot be -0, then
3797 // the result of the add operation cannot be either.
3798 if (range() == NULL || range()->CanBeMinusZero()) {
// Decides whether the stored double must be canonicalized (NaN normalized)
// before being written into a fast-double backing store, so that a signaling
// or non-canonical NaN bit pattern is never confused with the hole.
3805 bool HStoreKeyed::NeedsCanonicalization() {
3806 // If value is an integer or smi or comes from the result of a keyed load or
3807 // constant then it is either a non-hole value or in the case of a constant
3808 // the hole is only being stored explicitly: no need for canonicalization.
3810 // The exception to that is keyed loads from external float or double arrays:
3811 // these can load arbitrary representation of NaN.
3813 if (value()->IsConstant()) {
3817 if (value()->IsLoadKeyed()) {
3818 return IsExternalFloatOrDoubleElementsKind(
3819 HLoadKeyed::cast(value())->elements_kind());
3822 if (value()->IsChange()) {
3823 if (HChange::cast(value())->from().IsSmiOrInteger32()) {
3826 if (HChange::cast(value())->value()->type().IsSmi()) {
// Constant-folding helpers: materialize a folded result as an int32 HConstant
// (when it fits) or as a double HConstant.
3834 #define H_CONSTANT_INT(val) \
3835 HConstant::New(zone, context, static_cast<int32_t>(val))
3836 #define H_CONSTANT_DOUBLE(val) \
3837 HConstant::New(zone, context, static_cast<double>(val))
// Factory-with-folding for simple binary arithmetic: if both operands are
// numeric constants, compute the result in double precision and emit an
// int32 constant when exactly representable, otherwise a double constant;
// fall through to creating the real instruction.
3839 #define DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HInstr, op) \
3840 HInstruction* HInstr::New( \
3841 Zone* zone, HValue* context, HValue* left, HValue* right) { \
3842 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) { \
3843 HConstant* c_left = HConstant::cast(left); \
3844 HConstant* c_right = HConstant::cast(right); \
3845 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) { \
3846 double double_res = c_left->DoubleValue() op c_right->DoubleValue(); \
3847 if (IsInt32Double(double_res)) { \
3848 return H_CONSTANT_INT(double_res); \
3850 return H_CONSTANT_DOUBLE(double_res); \
3853 return new(zone) HInstr(context, left, right); \
// Instantiations for +, *, -.
3857 DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HAdd, +)
3858 DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HMul, *)
3859 DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR(HSub, -)
3861 #undef DEFINE_NEW_H_SIMPLE_ARITHMETIC_INSTR
// Factory for string concatenation: folds two string constants into a flat
// concatenated string constant, otherwise builds an HStringAdd instruction.
3864 HInstruction* HStringAdd::New(Zone* zone,
3868 PretenureFlag pretenure_flag,
3869 StringAddFlags flags,
3870 Handle<AllocationSite> allocation_site) {
3871 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
3872 HConstant* c_right = HConstant::cast(right);
3873 HConstant* c_left = HConstant::cast(left);
3874 if (c_left->HasStringValue() && c_right->HasStringValue()) {
3875 Handle<String> concat = zone->isolate()->factory()->NewFlatConcatString(
3876 c_left->StringValue(), c_right->StringValue());
3877 return HConstant::New(zone, context, concat);
3880 return new(zone) HStringAdd(
3881 context, left, right, pretenure_flag, flags, allocation_site);
// Prints which receiver checks the add performs and a pretenure letter
// (N = not tenured, D = tenured).
3885 void HStringAdd::PrintDataTo(StringStream* stream) {
3886 if ((flags() & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_BOTH) {
3887 stream->Add("_CheckBoth");
3888 } else if ((flags() & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_LEFT) {
3889 stream->Add("_CheckLeft");
3890 } else if ((flags() & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_RIGHT) {
3891 stream->Add("_CheckRight");
3894 if (pretenure_flag() == NOT_TENURED) stream->Add("N");
3895 else if (pretenure_flag() == TENURED) stream->Add("D");
// Factory: folds a constant char code to the corresponding one-character
// string constant (code is masked to 16 bits); a non-finite code folds to
// the empty string. Otherwise builds the instruction.
3900 HInstruction* HStringCharFromCode::New(
3901 Zone* zone, HValue* context, HValue* char_code) {
3902 if (FLAG_fold_constants && char_code->IsConstant()) {
3903 HConstant* c_code = HConstant::cast(char_code);
3904 Isolate* isolate = zone->isolate();
3905 if (c_code->HasNumberValue()) {
3906 if (std::isfinite(c_code->DoubleValue())) {
3907 uint32_t code = c_code->NumberValueAsInteger32() & 0xffff;
3908 return HConstant::New(zone, context,
3909 LookupSingleCharacterStringFromCode(isolate, code));
3911 return HConstant::New(zone, context, isolate->factory()->empty_string());
3914 return new(zone) HStringCharFromCode(context, char_code);
// Factory with constant folding for unary math builtins. NaN folds to the
// canonical NaN; the remaining returns handle +/-Infinity and finite inputs
// per operation. NOTE(review): the `switch (op)` statement and its `case`
// labels (exp/log/sqrt/powhalf/abs/round/floor, per the fold expressions)
// are elided from this listing — confirm the mapping against the full file.
3918 HInstruction* HUnaryMathOperation::New(
3919 Zone* zone, HValue* context, HValue* value, BuiltinFunctionId op) {
3921 if (!FLAG_fold_constants) break;
3922 if (!value->IsConstant()) break;
3923 HConstant* constant = HConstant::cast(value);
3924 if (!constant->HasNumberValue()) break;
3925 double d = constant->DoubleValue();
3926 if (std::isnan(d)) { // NaN poisons everything.
3927 return H_CONSTANT_DOUBLE(OS::nan_value());
3929 if (std::isinf(d)) { // +Infinity and -Infinity.
3932 return H_CONSTANT_DOUBLE((d > 0.0) ? d : 0.0);
3935 return H_CONSTANT_DOUBLE((d > 0.0) ? d : OS::nan_value());
3938 return H_CONSTANT_DOUBLE((d > 0.0) ? d : -d);
3941 return H_CONSTANT_DOUBLE(d);
3949 return H_CONSTANT_DOUBLE(fast_exp(d));
3951 return H_CONSTANT_DOUBLE(std::log(d));
3953 return H_CONSTANT_DOUBLE(fast_sqrt(d));
3955 return H_CONSTANT_DOUBLE(power_double_double(d, 0.5));
3957 return H_CONSTANT_DOUBLE((d >= 0.0) ? d + 0.0 : -d);
3959 // -0.5 .. -0.0 round to -0.0.
3960 if ((d >= -0.5 && Double(d).Sign() < 0)) return H_CONSTANT_DOUBLE(-0.0);
3961 // Doubles are represented as Significant * 2 ^ Exponent. If the
3962 // Exponent is not negative, the double value is already an integer.
3963 if (Double(d).Exponent() >= 0) return H_CONSTANT_DOUBLE(d);
3964 return H_CONSTANT_DOUBLE(std::floor(d + 0.5));
3966 return H_CONSTANT_DOUBLE(std::floor(d));
3972 return new(zone) HUnaryMathOperation(context, value, op);
// Factory for Math.pow: folds two numeric constants via power_helper,
// canonicalizing a NaN result; otherwise builds an HPower instruction.
3976 HInstruction* HPower::New(Zone* zone,
3980 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
3981 HConstant* c_left = HConstant::cast(left);
3982 HConstant* c_right = HConstant::cast(right);
3983 if (c_left->HasNumberValue() && c_right->HasNumberValue()) {
3984 double result = power_helper(c_left->DoubleValue(),
3985 c_right->DoubleValue());
3986 return H_CONSTANT_DOUBLE(std::isnan(result) ? OS::nan_value() : result);
3989 return new(zone) HPower(left, right);
// Factory for Math.min/max: folds two numeric constants, taking care of the
// +0/-0 distinction via the sign bit (min prefers -0, max prefers +0) and
// falling through to NaN when all comparisons fail.
3993 HInstruction* HMathMinMax::New(
3994 Zone* zone, HValue* context, HValue* left, HValue* right, Operation op) {
3995 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
3996 HConstant* c_left = HConstant::cast(left);
3997 HConstant* c_right = HConstant::cast(right);
3998 if (c_left->HasNumberValue() && c_right->HasNumberValue()) {
3999 double d_left = c_left->DoubleValue();
4000 double d_right = c_right->DoubleValue();
4001 if (op == kMathMin) {
4002 if (d_left > d_right) return H_CONSTANT_DOUBLE(d_right);
4003 if (d_left < d_right) return H_CONSTANT_DOUBLE(d_left);
4004 if (d_left == d_right) {
4005 // Handle +0 and -0.
4006 return H_CONSTANT_DOUBLE((Double(d_left).Sign() == -1) ? d_left
4010 if (d_left < d_right) return H_CONSTANT_DOUBLE(d_right);
4011 if (d_left > d_right) return H_CONSTANT_DOUBLE(d_left);
4012 if (d_left == d_right) {
4013 // Handle +0 and -0.
4014 return H_CONSTANT_DOUBLE((Double(d_left).Sign() == -1) ? d_right
4018 // All comparisons failed, must be NaN.
4019 return H_CONSTANT_DOUBLE(OS::nan_value());
4022 return new(zone) HMathMinMax(context, left, right, op);
// Factory for integer modulus: folds two int32 constants. kMinInt % -1 and
// a zero result with a negative dividend both fold to -0.0 (JS semantics),
// since -0 is not representable as an int32 constant.
4026 HInstruction* HMod::New(Zone* zone,
4030 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
4031 HConstant* c_left = HConstant::cast(left);
4032 HConstant* c_right = HConstant::cast(right);
4033 if (c_left->HasInteger32Value() && c_right->HasInteger32Value()) {
4034 int32_t dividend = c_left->Integer32Value();
4035 int32_t divisor = c_right->Integer32Value();
4036 if (dividend == kMinInt && divisor == -1) {
4037 return H_CONSTANT_DOUBLE(-0.0);
4040 int32_t res = dividend % divisor;
4041 if ((res == 0) && (dividend < 0)) {
4042 return H_CONSTANT_DOUBLE(-0.0);
4044 return H_CONSTANT_INT(res);
4048 return new(zone) HMod(context, left, right);
// Factory for division: folds numeric constants; division by (signed) zero
// folds to a correctly-signed infinity using the operands' sign bits.
4052 HInstruction* HDiv::New(
4053 Zone* zone, HValue* context, HValue* left, HValue* right) {
4054 // If left and right are constant values, try to return a constant value.
4055 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
4056 HConstant* c_left = HConstant::cast(left);
4057 HConstant* c_right = HConstant::cast(right);
4058 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
4059 if (c_right->DoubleValue() != 0) {
4060 double double_res = c_left->DoubleValue() / c_right->DoubleValue();
4061 if (IsInt32Double(double_res)) {
4062 return H_CONSTANT_INT(double_res);
4064 return H_CONSTANT_DOUBLE(double_res);
4066 int sign = Double(c_left->DoubleValue()).Sign() *
4067 Double(c_right->DoubleValue()).Sign(); // Right could be -0.
4068 return H_CONSTANT_DOUBLE(sign * V8_INFINITY);
4072 return new(zone) HDiv(context, left, right);
// Factory for bitwise ops: folds numeric constants after ToInt32 conversion
// (NumberValueAsInteger32), dispatching on the token (XOR/AND/OR).
4076 HInstruction* HBitwise::New(
4077 Zone* zone, HValue* context, Token::Value op, HValue* left, HValue* right) {
4078 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
4079 HConstant* c_left = HConstant::cast(left);
4080 HConstant* c_right = HConstant::cast(right);
4081 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
4083 int32_t v_left = c_left->NumberValueAsInteger32();
4084 int32_t v_right = c_right->NumberValueAsInteger32();
4086 case Token::BIT_XOR:
4087 result = v_left ^ v_right;
4089 case Token::BIT_AND:
4090 result = v_left & v_right;
4093 result = v_left | v_right;
4096 result = 0; // Please the compiler.
4099 return H_CONSTANT_INT(result);
4102 return new(zone) HBitwise(context, op, left, right);
// Factory-with-folding template for shift instructions: when both operands
// are numeric constants, emit the int32 `result` expression directly.
// Shift counts are masked with 0x1f per JS shift semantics.
4106 #define DEFINE_NEW_H_BITWISE_INSTR(HInstr, result) \
4107 HInstruction* HInstr::New( \
4108 Zone* zone, HValue* context, HValue* left, HValue* right) { \
4109 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) { \
4110 HConstant* c_left = HConstant::cast(left); \
4111 HConstant* c_right = HConstant::cast(right); \
4112 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) { \
4113 return H_CONSTANT_INT(result); \
4116 return new(zone) HInstr(context, left, right); \
4120 DEFINE_NEW_H_BITWISE_INSTR(HSar,
4121 c_left->NumberValueAsInteger32() >> (c_right->NumberValueAsInteger32() & 0x1f))
4122 DEFINE_NEW_H_BITWISE_INSTR(HShl,
4123 c_left->NumberValueAsInteger32() << (c_right->NumberValueAsInteger32() & 0x1f))
4125 #undef DEFINE_NEW_H_BITWISE_INSTR
// Factory for unsigned right shift: folded separately from HSar/HShl because
// a zero shift of a negative value yields a result above kMaxInt, which must
// be materialized as a double constant rather than an int32.
4128 HInstruction* HShr::New(
4129 Zone* zone, HValue* context, HValue* left, HValue* right) {
4130 if (FLAG_fold_constants && left->IsConstant() && right->IsConstant()) {
4131 HConstant* c_left = HConstant::cast(left);
4132 HConstant* c_right = HConstant::cast(right);
4133 if ((c_left->HasNumberValue() && c_right->HasNumberValue())) {
4134 int32_t left_val = c_left->NumberValueAsInteger32();
4135 int32_t right_val = c_right->NumberValueAsInteger32() & 0x1f;
4136 if ((right_val == 0) && (left_val < 0)) {
4137 return H_CONSTANT_DOUBLE(static_cast<uint32_t>(left_val));
4139 return H_CONSTANT_INT(static_cast<uint32_t>(left_val) >> right_val);
4142 return new(zone) HShr(context, left, right);
// Factory for sequential-string char access: folds a constant string and a
// constant in-bounds index to the char code at that position.
4146 HInstruction* HSeqStringGetChar::New(Zone* zone,
4148 String::Encoding encoding,
4151 if (FLAG_fold_constants && string->IsConstant() && index->IsConstant()) {
4152 HConstant* c_string = HConstant::cast(string);
4153 HConstant* c_index = HConstant::cast(index);
4154 if (c_string->HasStringValue() && c_index->HasInteger32Value()) {
4155 Handle<String> s = c_string->StringValue();
4156 int32_t i = c_index->Integer32Value();
4158 ASSERT_LT(i, s->length());
4159 return H_CONSTANT_INT(s->Get(i));
4162 return new(zone) HSeqStringGetChar(encoding, string, index);
4166 #undef H_CONSTANT_INT
4167 #undef H_CONSTANT_DOUBLE
// Prints the token name (e.g. "&") then the generic binary-op data.
4170 void HBitwise::PrintDataTo(StringStream* stream) {
4171 stream->Add(Token::Name(op_));
4173 HBitwiseBinaryOperation::PrintDataTo(stream);
// When every use of this phi truncates to int32 and every operand is a
// constant, rewrites the operands as int32 constants (doubles via
// DoubleToInt32, booleans as 0/1, immortal-immovable objects as 0) and
// overrides the observed input representation of binary-op uses to Smi.
4177 void HPhi::SimplifyConstantInputs() {
4178 // Convert constant inputs to integers when all uses are truncating.
4179 // This must happen before representation inference takes place.
4180 if (!CheckUsesForFlag(kTruncatingToInt32)) return;
4181 for (int i = 0; i < OperandCount(); ++i) {
4182 if (!OperandAt(i)->IsConstant()) return;
4184 HGraph* graph = block()->graph();
4185 for (int i = 0; i < OperandCount(); ++i) {
4186 HConstant* operand = HConstant::cast(OperandAt(i));
4187 if (operand->HasInteger32Value()) {
4189 } else if (operand->HasDoubleValue()) {
4190 HConstant* integer_input =
4191 HConstant::New(graph->zone(), graph->GetInvalidContext(),
4192 DoubleToInt32(operand->DoubleValue()));
4193 integer_input->InsertAfter(operand);
4194 SetOperandAt(i, integer_input);
4195 } else if (operand->HasBooleanValue()) {
4196 SetOperandAt(i, operand->BooleanValue() ? graph->GetConstant1()
4197 : graph->GetConstant0());
4198 } else if (operand->ImmortalImmovable()) {
4199 SetOperandAt(i, graph->GetConstant0());
4202 // Overwrite observed input representations because they are likely Tagged.
4203 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
4204 HValue* use = it.value();
4205 if (use->IsBinaryOperation()) {
4206 HBinaryOperation::cast(use)->set_observed_input_representation(
4207 it.index(), Representation::Smi());
// Updates the phi's representation three times, from progressively weaker
// signals: operand representations, then uses, then use requirements.
4213 void HPhi::InferRepresentation(HInferRepresentationPhase* h_infer) {
4214 ASSERT(CheckFlag(kFlexibleRepresentation));
4215 Representation new_rep = RepresentationFromInputs();
4216 UpdateRepresentation(new_rep, h_infer, "inputs");
4217 new_rep = RepresentationFromUses();
4218 UpdateRepresentation(new_rep, h_infer, "uses");
4219 new_rep = RepresentationFromUseRequirements();
4220 UpdateRepresentation(new_rep, h_infer, "use requirements");
// The join (generalization) of all operands' optimal representations.
4224 Representation HPhi::RepresentationFromInputs() {
4225 Representation r = Representation::None();
4226 for (int i = 0; i < OperandCount(); ++i) {
4227 r = r.generalize(OperandAt(i)->KnownOptimalRepresentation());
4233 // Returns a representation if all uses agree on the same representation.
4234 // Integer32 is also returned when some uses are Smi but others are Integer32.
4235 Representation HValue::RepresentationFromUseRequirements() {
4236 Representation rep = Representation::None();
4237 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
4238 // Ignore the use requirement from never run code
4239 if (it.value()->block()->IsUnreachable()) continue;
4241 // We check for observed_input_representation elsewhere.
4242 Representation use_rep =
4243 it.value()->RequiredInputRepresentation(it.index());
4248 if (use_rep.IsNone() || rep.Equals(use_rep)) continue;
4249 if (rep.generalize(use_rep).IsInteger32()) {
4250 rep = Representation::Integer32();
// Disagreeing uses: give up and report no common requirement.
4253 return Representation::None();
// True if some use requires a representation that is neither None, (per the
// elided condition, presumably Smi-compatible) nor Tagged — i.e. the value
// cannot stay a Smi for all uses. NOTE(review): line 4265 is elided here.
4259 bool HValue::HasNonSmiUse() {
4260 for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
4261 // We check for observed_input_representation elsewhere.
4262 Representation use_rep =
4263 it.value()->RequiredInputRepresentation(it.index());
4264 if (!use_rep.IsNone() &&
4266 !use_rep.IsTagged()) {
4274 // Node-specific verification code is only included in debug mode.
// Checks the SSA invariant: one operand per predecessor, each defined in (or
// dominating) the corresponding predecessor block.
4277 void HPhi::Verify() {
4278 ASSERT(OperandCount() == block()->predecessors()->length());
4279 for (int i = 0; i < OperandCount(); ++i) {
4280 HValue* value = OperandAt(i);
4281 HBasicBlock* defining_block = value->block();
4282 HBasicBlock* predecessor_block = block()->predecessors()->at(i);
4283 ASSERT(defining_block == predecessor_block ||
4284 defining_block->Dominates(predecessor_block));
// Debug-mode verification; delegates to the generic instruction check.
4289 void HSimulate::Verify() {
4290 HInstruction::Verify();
// Debug-mode verification: checks are pure guards and must have no uses.
4295 void HCheckHeapObject::Verify() {
4296 HInstruction::Verify();
4297 ASSERT(HasNoUses());
// Debug-mode verification: value checks are pure guards with no uses.
4301 void HCheckValue::Verify() {
4302 HInstruction::Verify();
4303 ASSERT(HasNoUses());
// Access descriptor for a FixedArray header slot; the length slot gets the
// dedicated array-length portion for better GVN.
4309 HObjectAccess HObjectAccess::ForFixedArrayHeader(int offset) {
4310 ASSERT(offset >= 0);
4311 ASSERT(offset < FixedArray::kHeaderSize);
4312 if (offset == FixedArray::kLengthOffset) return ForFixedArrayLength();
4313 return HObjectAccess(kInobject, offset);
// Access descriptor for an offset into an object with a known map: the
// elements/map offsets get dedicated portions; when a map is supplied,
// records whether the offset lies in the already-used in-object area
// (affects store handling for not-yet-initialized fields).
4317 HObjectAccess HObjectAccess::ForMapAndOffset(Handle<Map> map, int offset,
4318 Representation representation) {
4319 ASSERT(offset >= 0);
4320 Portion portion = kInobject;
4322 if (offset == JSObject::kElementsOffset) {
4323 portion = kElementsPointer;
4324 } else if (offset == JSObject::kMapOffset) {
4327 bool existing_inobject_property = true;
4328 if (!map.is_null()) {
4329 existing_inobject_property = (offset <
4330 map->instance_size() - map->unused_property_fields() * kPointerSize);
4332 return HObjectAccess(portion, offset, representation, Handle<String>::null(),
4333 false, existing_inobject_property);
// Access descriptor for an AllocationSite field; the pretenure counters are
// Smi-represented, the pointer fields Tagged, anything else untyped.
4337 HObjectAccess HObjectAccess::ForAllocationSiteOffset(int offset) {
4339 case AllocationSite::kTransitionInfoOffset:
4340 return HObjectAccess(kInobject, offset, Representation::Tagged());
4341 case AllocationSite::kNestedSiteOffset:
4342 return HObjectAccess(kInobject, offset, Representation::Tagged());
4343 case AllocationSite::kPretenureDataOffset:
4344 return HObjectAccess(kInobject, offset, Representation::Smi());
4345 case AllocationSite::kPretenureCreateCountOffset:
4346 return HObjectAccess(kInobject, offset, Representation::Smi());
4347 case AllocationSite::kDependentCodeOffset:
4348 return HObjectAccess(kInobject, offset, Representation::Tagged());
4349 case AllocationSite::kWeakNextOffset:
4350 return HObjectAccess(kInobject, offset, Representation::Tagged());
4354 return HObjectAccess(kInobject, offset);
// Access descriptor for a context slot, computed from the slot index.
4358 HObjectAccess HObjectAccess::ForContextSlot(int index) {
4360 Portion portion = kInobject;
4361 int offset = Context::kHeaderSize + index * kPointerSize;
4362 ASSERT_EQ(offset, Context::SlotOffset(index) + kHeapObjectTag);
4363 return HObjectAccess(portion, offset, Representation::Tagged());
// Access descriptor for a JSArray offset: elements, array length and map
// each get their dedicated GVN portion.
4367 HObjectAccess HObjectAccess::ForJSArrayOffset(int offset) {
4368 ASSERT(offset >= 0);
4369 Portion portion = kInobject;
4371 if (offset == JSObject::kElementsOffset) {
4372 portion = kElementsPointer;
4373 } else if (offset == JSArray::kLengthOffset) {
4374 portion = kArrayLengths;
4375 } else if (offset == JSObject::kMapOffset) {
4378 return HObjectAccess(portion, offset);
// Access descriptor for a slot in the out-of-object properties backing store.
4382 HObjectAccess HObjectAccess::ForBackingStoreOffset(int offset,
4383 Representation representation) {
4384 ASSERT(offset >= 0);
4385 return HObjectAccess(kBackingStore, offset, representation,
4386 Handle<String>::null(), false, false);
// Access descriptor for a named property found by a LookupResult: resolves
// the field index (from the map for existing fields, or from the transition
// target's descriptors for a to-be-added field) and maps negative indices to
// in-object slots, non-negative ones to the properties backing store.
4390 HObjectAccess HObjectAccess::ForField(Handle<Map> map,
4391 LookupResult* lookup,
4392 Handle<String> name) {
4393 ASSERT(lookup->IsField() || lookup->IsTransitionToField());
4395 Representation representation;
4396 if (lookup->IsField()) {
4397 index = lookup->GetLocalFieldIndexFromMap(*map);
4398 representation = lookup->representation();
4400 Map* transition = lookup->GetTransitionTarget();
4401 int descriptor = transition->LastAdded();
4402 index = transition->instance_descriptors()->GetFieldIndex(descriptor) -
4403 map->inobject_properties();
4404 PropertyDetails details =
4405 transition->instance_descriptors()->GetDetails(descriptor);
4406 representation = details.representation();
4409 // Negative property indices are in-object properties, indexed
4410 // from the end of the fixed part of the object.
4411 int offset = (index * kPointerSize) + map->instance_size();
4412 return HObjectAccess(kInobject, offset, representation, name, false, true);
4414 // Non-negative property indices are in the properties array.
4415 int offset = (index * kPointerSize) + FixedArray::kHeaderSize;
4416 return HObjectAccess(kBackingStore, offset, representation, name,
// Access descriptor for a Cell's value slot, named "cell_value" for printing.
4422 HObjectAccess HObjectAccess::ForCellPayload(Isolate* isolate) {
4423 return HObjectAccess(
4424 kInobject, Cell::kValueOffset, Representation::Tagged(),
4425 Handle<String>(isolate->heap()->cell_value_string()));
// Attaches GVN dependency/change flags to a load or store according to the
// access portion: stores get the "changes" flag, loads the matching
// "depends on" flag. NOTE(review): some case labels and the store/load
// branch structure near 4431-4440 are elided in this listing.
4429 void HObjectAccess::SetGVNFlags(HValue *instr, bool is_store) {
4430 // set the appropriate GVN flags for a given load or store instruction
4432 // track dominating allocations in order to eliminate write barriers
4433 instr->SetGVNFlag(kDependsOnNewSpacePromotion);
4434 instr->SetFlag(HValue::kTrackSideEffectDominators);
4436 // try to GVN loads, but don't hoist above map changes
4437 instr->SetFlag(HValue::kUseGVN);
4438 instr->SetGVNFlag(kDependsOnMaps);
4441 switch (portion()) {
4443 instr->SetGVNFlag(is_store
4444 ? kChangesArrayLengths : kDependsOnArrayLengths);
4446 case kStringLengths:
4447 instr->SetGVNFlag(is_store
4448 ? kChangesStringLengths : kDependsOnStringLengths);
4451 instr->SetGVNFlag(is_store
4452 ? kChangesInobjectFields : kDependsOnInobjectFields);
4455 instr->SetGVNFlag(is_store
4456 ? kChangesDoubleFields : kDependsOnDoubleFields);
4459 instr->SetGVNFlag(is_store
4460 ? kChangesBackingStoreFields : kDependsOnBackingStoreFields);
4462 case kElementsPointer:
4463 instr->SetGVNFlag(is_store
4464 ? kChangesElementsPointer : kDependsOnElementsPointer);
4467 instr->SetGVNFlag(is_store
4468 ? kChangesMaps : kDependsOnMaps);
4470 case kExternalMemory:
4471 instr->SetGVNFlag(is_store
4472 ? kChangesExternalMemory : kDependsOnExternalMemory);
// Prints a human-readable description of the access (portion-specific label
// or field name) followed by "@offset".
4478 void HObjectAccess::PrintTo(StringStream* stream) {
4481 switch (portion()) {
4483 case kStringLengths:
4484 stream->Add("%length");
4486 case kElementsPointer:
4487 stream->Add("%elements");
4490 stream->Add("%map");
4492 case kDouble: // fall through
4494 if (!name_.is_null()) {
4495 stream->Add(String::cast(*name_)->ToCString().get());
4497 stream->Add("[in-object]");
4500 if (!name_.is_null()) {
4501 stream->Add(String::cast(*name_)->ToCString().get());
4503 stream->Add("[backing-store]");
4505 case kExternalMemory:
4506 stream->Add("[external-memory]");
4510 stream->Add("@%d", offset());
// Factory for a zero-operand SIMD operation (no folding).
4514 HInstruction* HNullarySIMDOperation::New(
4515 Zone* zone, HValue* context, BuiltinFunctionId op) {
4516 return new(zone) HNullarySIMDOperation(context, op);
// Factory for a one-operand SIMD operation with target representation `to`.
4520 HInstruction* HUnarySIMDOperation::New(
4521 Zone* zone, HValue* context, HValue* value, BuiltinFunctionId op,
4522 Representation to) {
4523 return new(zone) HUnarySIMDOperation(context, value, op, to);
// Factory for a two-operand SIMD operation.
4527 HInstruction* HBinarySIMDOperation::New(
4528 Zone* zone, HValue* context, HValue* left, HValue* right,
4529 BuiltinFunctionId op) {
4530 return new(zone) HBinarySIMDOperation(context, left, right, op);
// Factory for a three-operand SIMD operation (mask, left, right).
4534 HInstruction* HTernarySIMDOperation::New(
4535 Zone* zone, HValue* context, HValue* mask, HValue* left, HValue* right,
4536 BuiltinFunctionId op) {
4537 return new(zone) HTernarySIMDOperation(context, mask, left, right, op);
// Factory for a four-operand SIMD operation (lane values x, y, z, w).
4541 HInstruction* HQuarternarySIMDOperation::New(
4542 Zone* zone, HValue* context, HValue* x, HValue* y, HValue* z, HValue* w,
4543 BuiltinFunctionId op) {
4544 return new(zone) HQuarternarySIMDOperation(context, x, y, z, w, op);
// Maps the builtin id to a "module.function" name via the operations macro.
4548 const char* HNullarySIMDOperation::OpName() const {
4550 #define SIMD_NULLARY_OPERATION_CASE_ITEM(module, function, name, p4) \
4552 return #module "." #function;
4553 SIMD_NULLARY_OPERATIONS(SIMD_NULLARY_OPERATION_CASE_ITEM)
4554 #undef SIMD_NULLARY_OPERATION_CASE_ITEM
// Prints the operation name.
4562 void HNullarySIMDOperation::PrintDataTo(StringStream* stream) {
4563 const char* name = OpName();
4564 stream->Add("%s", name);
// Maps the builtin id to a "module.function" name via the operations macros.
4568 const char* HUnarySIMDOperation::OpName() const {
4570 #define SIMD_UNARY_OPERATION_CASE_ITEM(module, function, name, p4, p5) \
4572 return #module "." #function;
4573 SIMD_UNARY_OPERATIONS(SIMD_UNARY_OPERATION_CASE_ITEM)
4574 SIMD_UNARY_OPERATIONS_FOR_PROPERTY_ACCESS(SIMD_UNARY_OPERATION_CASE_ITEM)
4575 #undef SIMD_UNARY_OPERATION_CASE_ITEM
// Prints the operation name followed by its operand.
4583 void HUnarySIMDOperation::PrintDataTo(StringStream* stream) {
4584 const char* name = OpName();
4585 stream->Add("%s ", name);
4586 value()->PrintNameTo(stream);
// Maps the builtin id to a "module.function" name via the operations macro.
4590 const char* HBinarySIMDOperation::OpName() const {
4592 #define SIMD_BINARY_OPERATION_CASE_ITEM(module, function, name, p4, p5, p6) \
4594 return #module "." #function;
4595 SIMD_BINARY_OPERATIONS(SIMD_BINARY_OPERATION_CASE_ITEM)
4596 #undef SIMD_BINARY_OPERATION_CASE_ITEM
// Prints the operation name followed by its two operands.
4604 void HBinarySIMDOperation::PrintDataTo(StringStream* stream) {
4605 const char* name = OpName();
4606 stream->Add("%s ", name);
4607 left()->PrintNameTo(stream);
4609 right()->PrintNameTo(stream);
// Maps the builtin id to a "module.function" name via the operations macro.
4613 const char* HTernarySIMDOperation::OpName() const {
4615 #define SIMD_TERNARY_OPERATION_CASE_ITEM(module, function, name, p4, p5, p6, \
4618 return #module "." #function;
4619 SIMD_TERNARY_OPERATIONS(SIMD_TERNARY_OPERATION_CASE_ITEM)
4620 #undef SIMD_TERNARY_OPERATION_CASE_ITEM
// Prints the operation name followed by its three operands.
4628 void HTernarySIMDOperation::PrintDataTo(StringStream* stream) {
4629 const char* name = OpName();
4630 stream->Add("%s ", name);
4631 first()->PrintNameTo(stream);
4633 second()->PrintNameTo(stream);
4635 third()->PrintNameTo(stream);
// Maps the builtin id to a "module.function" name via the operations macro.
4639 const char* HQuarternarySIMDOperation::OpName() const {
4641 #define SIMD_QUARTERNARY_OPERATION_CASE_ITEM(module, function, name, p4, p5, \
4644 return #module "." #function;
4645 SIMD_QUARTERNARY_OPERATIONS(SIMD_QUARTERNARY_OPERATION_CASE_ITEM)
4646 #undef SIMD_QUARTERNARY_OPERATION_CASE_ITEM
// Prints the operation name followed by its four operands.
4654 void HQuarternarySIMDOperation::PrintDataTo(StringStream* stream) {
4655 const char* name = OpName();
4656 stream->Add("%s ", name);
4657 x()->PrintNameTo(stream);
4659 y()->PrintNameTo(stream);
4661 z()->PrintNameTo(stream);
4663 w()->PrintNameTo(stream);
4667 } } // namespace v8::internal