  length()->PrintNameTo(stream);
}
+
void HCallConstantFunction::PrintDataTo(StringStream* stream) {
  if (IsApplyFunction()) {
    stream->Add("optimized apply ");
  }
+Range* HChange::InferRange() {
+ Range* input_range = value()->range();
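+ // Tagging an int32 that fits in the Smi range yields a Smi (no heap
+ // number is allocated), so the result type can be narrowed here.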
+ if (from().IsInteger32() &&
+     to().IsTagged() &&
+     input_range != NULL && input_range->IsInSmiRange()) {
+   set_type(HType::Smi());
+ }
+ Range* result = (input_range != NULL)
+     ? input_range->Copy()
+     : HValue::InferRange();
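+ // Integer32 cannot represent -0; only tagged and double values can.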
+ if (to().IsInteger32()) result->set_can_be_minus_zero(false);
+ return result;
+}
+
+
Range* HConstant::InferRange() {
  if (has_int32_value_) {
    Range* result = new Range(int32_value_, int32_value_);
+    result->set_can_be_minus_zero(false);
    return result;
  }

      Range* result = (left()->range() != NULL)
          ? left()->range()->Copy()
          : new Range();
      result->Sar(c->Integer32Value());
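+      // An arithmetic right shift of an int32 yields an int32, never -0.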
+      result->set_can_be_minus_zero(false);
      return result;
    }
  }

      Range* result = (left()->range() != NULL)
          ? left()->range()->Copy()
          : new Range();
      result->Shl(c->Integer32Value());
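+      // Likewise, a left shift produces an int32 result, never -0.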
+      result->set_can_be_minus_zero(false);
      return result;
    }
  }
Range* next() const { return next_; }
Range* CopyClearLower() const { return new Range(kMinInt, upper_); }
Range* CopyClearUpper() const { return new Range(lower_, kMaxInt); }
- Range* Copy() const { return new Range(lower_, upper_); }
+ Range* Copy() const {
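+   // Carry over the minus-zero flag: a fresh Range defaults it to false,
+   // which would wrongly claim the copy cannot be -0.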
+   Range* result = new Range(lower_, upper_);
+   result->set_can_be_minus_zero(CanBeMinusZero());
+   return result;
+ }
int32_t Mask() const;
void set_can_be_minus_zero(bool b) { can_be_minus_zero_ = b; }
bool CanBeMinusZero() const { return CanBeZero() && can_be_minus_zero_; }
bool CanBeZero() const { return upper_ >= 0 && lower_ <= 0; }
bool CanBeNegative() const { return lower_ < 0; }
bool Includes(int value) const { return lower_ <= value && upper_ >= value; }
- bool IsMostGeneric() const { return lower_ == kMinInt && upper_ == kMaxInt; }
+ bool IsMostGeneric() const {
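+   // A range carries no information only if it spans all of int32 and can
+   // also be -0.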
+   return lower_ == kMinInt && upper_ == kMaxInt && CanBeMinusZero();
+ }
bool IsInSmiRange() const {
  return lower_ >= Smi::kMinValue && upper_ <= Smi::kMaxValue;
}
virtual bool IsConvertibleToInteger() const { return true; }
HType type() const { return type_; }
- void set_type(HType type) {
-   ASSERT(HasNoUses());
-   type_ = type;
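+ // Types may now be set after a value already has uses (e.g. by
+ // HChange::InferRange), so only allow narrowing to a subtype: facts
+ // derived from the old type then remain valid.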
+ void set_type(HType new_type) {
+   ASSERT(new_type.IsSubtypeOf(type_));
+   type_ = new_type;
}
// An operation needs to override this function iff:

set_representation(to);
SetFlag(kUseGVN);
if (is_truncating) SetFlag(kTruncatingToInt32);
- if (from.IsInteger32() && to.IsTagged() && value->range() != NULL &&
-     value->range()->IsInSmiRange()) {
-   set_type(HType::Smi());
- }
}
virtual HValue* EnsureAndPropagateNotMinusZero(BitVector* visited);
Representation from() { return from_; }
+ virtual Range* InferRange();
+
virtual void PrintDataTo(StringStream* stream);
DECLARE_CONCRETE_INSTRUCTION(Change)
  HInferRepresentation rep(graph());
  rep.Analyze();
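+  // Insert representation changes before running range analysis, so that
+  // ranges are computed on the final representations and the new HChange
+  // instructions can infer their own ranges.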
+  graph()->MarkDeoptimizeOnUndefined();
+  graph()->InsertRepresentationChanges();
+
  if (FLAG_use_range) {
    HRangeAnalysis rangeAnalysis(graph());
    rangeAnalysis.Analyze();
  }
-
  graph()->InitializeInferredTypes();
  graph()->Canonicalize();
-  graph()->MarkDeoptimizeOnUndefined();
-  graph()->InsertRepresentationChanges();
  graph()->ComputeMinusZeroChecks();
  // Eliminate redundant stack checks on backwards branches.