void LCodeGen::EmitNumberUntagD(Register input_reg,
DwVfpRegister result_reg,
- bool deoptimize_on_undefined,
+ bool allow_undefined_as_nan,
bool deoptimize_on_minus_zero,
LEnvironment* env,
NumberUntagDMode mode) {
__ ldr(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset));
__ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
__ cmp(scratch, Operand(ip));
- if (deoptimize_on_undefined) {
+ if (!allow_undefined_as_nan) {
DeoptimizeIf(ne, env);
} else {
Label heap_number, convert;
}
EmitNumberUntagD(input_reg, result_reg,
- instr->hydrogen()->deoptimize_on_undefined(),
+ instr->hydrogen()->allow_undefined_as_nan(),
instr->hydrogen()->deoptimize_on_minus_zero(),
instr->environment(),
mode);
void EmitBranch(int left_block, int right_block, Condition cc);
void EmitNumberUntagD(Register input,
DwVfpRegister result,
- bool deoptimize_on_undefined,
+ bool allow_undefined_as_nan,
bool deoptimize_on_minus_zero,
LEnvironment* env,
NumberUntagDMode mode);
if (CanTruncateToInt32()) stream->Add(" truncating-int32");
if (CheckFlag(kBailoutOnMinusZero)) stream->Add(" -0?");
- if (CheckFlag(kDeoptimizeOnUndefined)) stream->Add(" deopt-on-undefined");
+ if (CheckFlag(kAllowUndefinedAsNaN)) stream->Add(" allow-undefined-as-nan");
}
// (false). Therefore, any comparisons other than ordered relational
// comparisons must cause a deopt when one of their arguments is undefined.
// See also v8:1434
- if (!Token::IsOrderedRelationalCompareOp(token_)) {
- SetFlag(kDeoptimizeOnUndefined);
+ if (Token::IsOrderedRelationalCompareOp(token_)) {
+ SetFlag(kAllowUndefinedAsNaN);
}
}
ChangeRepresentation(rep);
for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
HValue* use = it.value();
- if (use->CheckFlag(HValue::kDeoptimizeOnUndefined)) {
+ if (!use->CheckFlag(HValue::kAllowUndefinedAsNaN)) {
return false;
}
}
// track dominating allocations in order to eliminate write barriers
instr->SetGVNFlag(kDependsOnNewSpacePromotion);
instr->SetFlag(HValue::kTrackSideEffectDominators);
- instr->SetFlag(HValue::kDeoptimizeOnUndefined);
} else {
// try to GVN loads, but don't hoist above map changes
instr->SetFlag(HValue::kUseGVN);
kCanOverflow,
kBailoutOnMinusZero,
kCanBeDivByZero,
- kDeoptimizeOnUndefined,
+ kAllowUndefinedAsNaN,
kIsArguments,
kTruncatingToInt32,
// Set after an instruction is killed.
HChange(HValue* value,
Representation to,
bool is_truncating,
- bool deoptimize_on_undefined)
+ bool allow_undefined_as_nan)
: HUnaryOperation(value) {
ASSERT(!value->representation().IsNone());
ASSERT(!to.IsNone());
ASSERT(!value->representation().Equals(to));
set_representation(to);
SetFlag(kUseGVN);
- if (deoptimize_on_undefined) SetFlag(kDeoptimizeOnUndefined);
+ if (allow_undefined_as_nan) SetFlag(kAllowUndefinedAsNaN);
if (is_truncating) SetFlag(kTruncatingToInt32);
if (value->representation().IsSmi() || value->type().IsSmi()) {
set_type(HType::Smi());
Representation from() const { return value()->representation(); }
Representation to() const { return representation(); }
- bool deoptimize_on_undefined() const {
- return CheckFlag(kDeoptimizeOnUndefined);
+ bool allow_undefined_as_nan() const {
+ return CheckFlag(kAllowUndefinedAsNaN);
}
bool deoptimize_on_minus_zero() const {
return CheckFlag(kBailoutOnMinusZero);
explicit HClampToUint8(HValue* value)
: HUnaryOperation(value) {
set_representation(Representation::Integer32());
+ SetFlag(kAllowUndefinedAsNaN);
SetFlag(kUseGVN);
}
set_representation(Representation::Integer32());
SetFlag(kUseGVN);
SetFlag(kTruncatingToInt32);
+ SetFlag(kAllowUndefinedAsNaN);
}
virtual Representation RequiredInputRepresentation(int index) {
UNREACHABLE();
}
SetFlag(kUseGVN);
+ SetFlag(kAllowUndefinedAsNaN);
}
virtual bool IsDeletable() const { return true; }
}
ASSERT(merged_index >= 0);
SetFlag(kFlexibleRepresentation);
+ SetFlag(kAllowUndefinedAsNaN);
}
virtual Representation RepresentationFromInputs();
: HBinaryOperation(context, left, right) {
SetFlag(kFlexibleRepresentation);
SetFlag(kTruncatingToInt32);
+ SetFlag(kAllowUndefinedAsNaN);
SetAllSideEffects();
}
if (!right->IsConstant()) {
SetFlag(kCanBeDivByZero);
}
+ SetFlag(kAllowUndefinedAsNaN);
}
virtual HValue* EnsureAndPropagateNotMinusZero(BitVector* visited);
: HBinaryOperation(context, left, right) {
SetAllSideEffects();
SetFlag(kFlexibleRepresentation);
+ SetFlag(kAllowUndefinedAsNaN);
}
virtual void RepresentationChanged(Representation to) {
}
if (is_external()) {
SetGVNFlag(kChangesSpecializedArrayElements);
+ SetFlag(kAllowUndefinedAsNaN);
} else if (IsFastDoubleElementsKind(elements_kind)) {
SetGVNFlag(kChangesDoubleArrayElements);
- SetFlag(kDeoptimizeOnUndefined);
} else if (IsFastSmiElementsKind(elements_kind)) {
SetGVNFlag(kChangesArrayElements);
- SetFlag(kDeoptimizeOnUndefined);
} else {
SetGVNFlag(kChangesArrayElements);
}
? FAST_HOLEY_ELEMENTS : to_elements_kind;
HInstruction* holey_store = AddInstruction(
new(zone()) HStoreKeyed(to_elements, key, element, holey_kind));
- holey_store->ClearFlag(HValue::kDeoptimizeOnUndefined);
+ // Allow NaN hole values to be converted to their tagged counterparts.
+ if (IsFastHoleyElementsKind(to_elements_kind)) {
+ holey_store->SetFlag(HValue::kAllowUndefinedAsNaN);
+ }
builder.EndBody();
// change instructions for them.
HInstruction* new_value = NULL;
bool is_truncating = use_value->CheckFlag(HValue::kTruncatingToInt32);
- bool deoptimize_on_undefined =
- use_value->CheckFlag(HValue::kDeoptimizeOnUndefined);
+ bool allow_undefined_as_nan =
+ use_value->CheckFlag(HValue::kAllowUndefinedAsNaN);
if (value->IsConstant()) {
HConstant* constant = HConstant::cast(value);
// Try to create a new copy of the constant with the new representation.
if (new_value == NULL) {
new_value = new(zone()) HChange(value, to,
- is_truncating, deoptimize_on_undefined);
+ is_truncating, allow_undefined_as_nan);
}
new_value->InsertBefore(next);
void HGraph::RecursivelyMarkPhiDeoptimizeOnUndefined(HPhi* phi) {
- if (phi->CheckFlag(HValue::kDeoptimizeOnUndefined)) return;
- phi->SetFlag(HValue::kDeoptimizeOnUndefined);
+ if (!phi->CheckFlag(HValue::kAllowUndefinedAsNaN)) return;
+ phi->ClearFlag(HValue::kAllowUndefinedAsNaN);
for (int i = 0; i < phi->OperandCount(); ++i) {
HValue* input = phi->OperandAt(i);
if (input->IsPhi()) {
// if one of its uses has this flag set.
for (int i = 0; i < phi_list()->length(); i++) {
HPhi* phi = phi_list()->at(i);
- if (phi->representation().IsDouble()) {
- for (HUseIterator it(phi->uses()); !it.Done(); it.Advance()) {
- int use_index = it.index();
- HValue* use_value = it.value();
- Representation req = use_value->RequiredInputRepresentation(use_index);
- if (!req.IsDouble() ||
- use_value->CheckFlag(HValue::kDeoptimizeOnUndefined)) {
- RecursivelyMarkPhiDeoptimizeOnUndefined(phi);
- break;
- }
+ for (HUseIterator it(phi->uses()); !it.Done(); it.Advance()) {
+ HValue* use_value = it.value();
+ if (!use_value->CheckFlag(HValue::kAllowUndefinedAsNaN)) {
+ RecursivelyMarkPhiDeoptimizeOnUndefined(phi);
+ break;
}
}
}
boilerplate_elements, key_constant, NULL, kind, ALLOW_RETURN_HOLE));
HInstruction* store = AddInstruction(new(zone) HStoreKeyed(
object_elements, key_constant, value_instruction, kind));
- store->ClearFlag(HValue::kDeoptimizeOnUndefined);
+ store->SetFlag(HValue::kAllowUndefinedAsNaN);
}
}
void LCodeGen::EmitNumberUntagDNoSSE2(Register input_reg,
Register temp_reg,
- bool deoptimize_on_undefined,
+ bool allow_undefined_as_nan,
bool deoptimize_on_minus_zero,
LEnvironment* env,
NumberUntagDMode mode) {
// Heap number map check.
__ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
factory()->heap_number_map());
- if (deoptimize_on_undefined) {
+ if (!allow_undefined_as_nan) {
DeoptimizeIf(not_equal, env);
} else {
Label heap_number, convert;
void LCodeGen::EmitNumberUntagD(Register input_reg,
Register temp_reg,
XMMRegister result_reg,
- bool deoptimize_on_undefined,
+ bool allow_undefined_as_nan,
bool deoptimize_on_minus_zero,
LEnvironment* env,
NumberUntagDMode mode) {
// Heap number map check.
__ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
factory()->heap_number_map());
- if (deoptimize_on_undefined) {
+ if (!allow_undefined_as_nan) {
DeoptimizeIf(not_equal, env);
} else {
Label heap_number, convert;
EmitNumberUntagD(input_reg,
temp_reg,
result_reg,
- instr->hydrogen()->deoptimize_on_undefined(),
+ instr->hydrogen()->allow_undefined_as_nan(),
deoptimize_on_minus_zero,
instr->environment(),
mode);
} else {
EmitNumberUntagDNoSSE2(input_reg,
temp_reg,
- instr->hydrogen()->deoptimize_on_undefined(),
+ instr->hydrogen()->allow_undefined_as_nan(),
deoptimize_on_minus_zero,
instr->environment(),
mode);
Register input,
Register temp,
XMMRegister result,
- bool deoptimize_on_undefined,
+ bool allow_undefined_as_nan,
bool deoptimize_on_minus_zero,
LEnvironment* env,
NumberUntagDMode mode = NUMBER_CANDIDATE_IS_ANY_TAGGED);
void EmitNumberUntagDNoSSE2(
Register input,
Register temp,
- bool deoptimize_on_undefined,
+ bool allow_undefined_as_nan,
bool deoptimize_on_minus_zero,
LEnvironment* env,
NumberUntagDMode mode = NUMBER_CANDIDATE_IS_ANY_TAGGED);
void LCodeGen::EmitNumberUntagD(Register input_reg,
XMMRegister result_reg,
- bool deoptimize_on_undefined,
+ bool allow_undefined_as_nan,
bool deoptimize_on_minus_zero,
LEnvironment* env,
NumberUntagDMode mode) {
// Heap number map check.
__ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset),
Heap::kHeapNumberMapRootIndex);
- if (deoptimize_on_undefined) {
+ if (!allow_undefined_as_nan) {
DeoptimizeIf(not_equal, env);
} else {
Label heap_number, convert;
}
EmitNumberUntagD(input_reg, result_reg,
- instr->hydrogen()->deoptimize_on_undefined(),
+ instr->hydrogen()->allow_undefined_as_nan(),
instr->hydrogen()->deoptimize_on_minus_zero(),
instr->environment(),
mode);
void EmitNumberUntagD(
Register input,
XMMRegister result,
- bool deoptimize_on_undefined,
+ bool allow_undefined_as_nan,
bool deoptimize_on_minus_zero,
LEnvironment* env,
NumberUntagDMode mode = NUMBER_CANDIDATE_IS_ANY_TAGGED);
// Flags: --allow-natives-syntax
-function f(test, test2, a, i) {
+function f_store(test, test2, a, i) {
var o = [0.5,1,,3];
var d;
if (test) {
return d;
}
-var a = [0, 0, 0, {}];
-f(true, false, a, 0);
-f(true, true, a, 0);
-f(false, false, a, 1);
-f(false, true, a, 1);
-%OptimizeFunctionOnNextCall(f);
-f(false, false, a, 2);
-assertEquals(undefined, a[2]);
+var a1 = [0, 0, 0, {}];
+f_store(true, false, a1, 0);
+f_store(true, true, a1, 0);
+f_store(false, false, a1, 1);
+f_store(false, true, a1, 1);
+%OptimizeFunctionOnNextCall(f_store);
+f_store(false, false, a1, 2);
+assertEquals(undefined, a1[2]);
+
+function test_arg(expected) {
+ return function(v) {
+ assertEquals(expected, v);
+ }
+}
+
+function f_call(f, test, test2, i) {
+ var o = [0.5,1,,3];
+ var d;
+ if (test) {
+ d = 1.5;
+ } else {
+ d = o[i];
+ }
+ if (test2) {
+ d += 1;
+ }
+ f(d);
+ return d;
+}
+
+f_call(test_arg(1.5), true, false, 0);
+f_call(test_arg(2.5), true, true, 0);
+f_call(test_arg(1), false, false, 1);
+f_call(test_arg(2), false, true, 1);
+%OptimizeFunctionOnNextCall(f_call);
+f_call(test_arg(undefined), false, false, 2);
+
+
+function f_external(test, test2, test3, a, i) {
+ var o = [0.5,1,,3];
+ var d;
+ if (test) {
+ d = 1.5;
+ } else {
+ d = o[i];
+ }
+ if (test2) {
+ d += 1;
+ }
+ if (test3) {
+ d = d|0;
+ }
+ a[d] = 1;
+ return d;
+}
+
+var a2 = new Int32Array(10);
+f_external(true, false, true, a2, 0);
+f_external(true, true, true, a2, 0);
+f_external(false, false, true, a2, 1);
+f_external(false, true, true, a2, 1);
+%OptimizeFunctionOnNextCall(f_external);
+f_external(false, false, false, a2, 2);
+assertEquals(1, a2[undefined]);