}
DECLARE_CONCRETE_INSTRUCTION(NumberUntagD, "double-untag")
+ DECLARE_HYDROGEN_ACCESSOR(Change)
};
void LCodeGen::EmitNumberUntagD(Register input_reg,
DoubleRegister result_reg,
+ bool deoptimize_on_undefined,
LEnvironment* env) {
Register scratch = scratch0();
SwVfpRegister flt_scratch = s0;
__ ldr(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset));
__ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
__ cmp(scratch, Operand(ip));
- __ b(eq, &heap_number);
+ if (deoptimize_on_undefined) {
+ DeoptimizeIf(ne, env);
+ } else {
+ Label heap_number;
+ __ b(eq, &heap_number);
- __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
- __ cmp(input_reg, Operand(ip));
- DeoptimizeIf(ne, env);
+ __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
+ __ cmp(input_reg, Operand(ip));
+ DeoptimizeIf(ne, env);
- // Convert undefined to NaN.
- __ LoadRoot(ip, Heap::kNanValueRootIndex);
- __ sub(ip, ip, Operand(kHeapObjectTag));
- __ vldr(result_reg, ip, HeapNumber::kValueOffset);
- __ jmp(&done);
+ // Convert undefined to NaN.
+ __ LoadRoot(ip, Heap::kNanValueRootIndex);
+ __ sub(ip, ip, Operand(kHeapObjectTag));
+ __ vldr(result_reg, ip, HeapNumber::kValueOffset);
+ __ jmp(&done);
+ __ bind(&heap_number);
+ }
// Heap number to double register conversion.
- __ bind(&heap_number);
__ sub(ip, input_reg, Operand(kHeapObjectTag));
__ vldr(result_reg, ip, HeapNumber::kValueOffset);
__ jmp(&done);
Register input_reg = ToRegister(input);
DoubleRegister result_reg = ToDoubleRegister(result);
- EmitNumberUntagD(input_reg, result_reg, instr->environment());
+ EmitNumberUntagD(input_reg, result_reg,
+ instr->hydrogen()->deoptimize_on_undefined(),
+ instr->environment());
}
void EmitCmpI(LOperand* left, LOperand* right);
void EmitNumberUntagD(Register input,
DoubleRegister result,
+ bool deoptimize_on_undefined,
LEnvironment* env);
// Emits optimized code for typeof x == "y". Modifies input register.
if (r.IsTagged()) {
SetAllSideEffects();
ClearFlag(kUseGVN);
+ } else if (r.IsDouble()) {
+ SetFlag(kDeoptimizeOnUndefined);
+ ClearAllSideEffects();
+ SetFlag(kUseGVN);
} else {
ClearAllSideEffects();
SetFlag(kUseGVN);
kCanOverflow,
kBailoutOnMinusZero,
kCanBeDivByZero,
+ kDeoptimizeOnUndefined,
kIsArguments,
kTruncatingToInt32,
kLastFlag = kTruncatingToInt32
HChange(HValue* value,
Representation from,
Representation to,
- bool is_truncating)
- : HUnaryOperation(value), from_(from) {
+ bool is_truncating,
+ bool deoptimize_on_undefined)
+ : HUnaryOperation(value),
+ from_(from),
+ deoptimize_on_undefined_(deoptimize_on_undefined) {
ASSERT(!from.IsNone() && !to.IsNone());
ASSERT(!from.Equals(to));
set_representation(to);
Representation from() const { return from_; }
Representation to() const { return representation(); }
+ bool deoptimize_on_undefined() const { return deoptimize_on_undefined_; }
virtual Representation RequiredInputRepresentation(int index) const {
return from_;
}
if (!other->IsChange()) return false;
HChange* change = HChange::cast(other);
return value() == change->value()
- && to().Equals(change->to());
+ && to().Equals(change->to())
+ && deoptimize_on_undefined() == change->deoptimize_on_undefined();
}
private:
Representation from_;
+ bool deoptimize_on_undefined_;
};
// change instructions for them.
HInstruction* new_value = NULL;
bool is_truncating = use_value->CheckFlag(HValue::kTruncatingToInt32);
+ bool deoptimize_on_undefined =
+ use_value->CheckFlag(HValue::kDeoptimizeOnUndefined);
if (value->IsConstant()) {
HConstant* constant = HConstant::cast(value);
// Try to create a new copy of the constant with the new representation.
}
if (new_value == NULL) {
- new_value =
- new(zone()) HChange(value, value->representation(), to, is_truncating);
+ new_value = new(zone()) HChange(value, value->representation(), to,
+ is_truncating, deoptimize_on_undefined);
}
new_value->InsertBefore(next);
}
+// Marks |phi| with kDeoptimizeOnUndefined and propagates the flag backwards
+// through every phi operand that is itself a phi.  An already-set flag acts
+// as the "visited" marker, so cycles in the phi graph terminate the
+// recursion instead of looping forever.
+void HGraph::RecursivelyMarkPhiDeoptimizeOnUndefined(HPhi* phi) {
+  if (phi->CheckFlag(HValue::kDeoptimizeOnUndefined)) return;
+  phi->SetFlag(HValue::kDeoptimizeOnUndefined);
+  for (int i = 0; i < phi->OperandCount(); ++i) {
+    HValue* input = phi->OperandAt(i);
+    if (input->IsPhi()) {
+      RecursivelyMarkPhiDeoptimizeOnUndefined(HPhi::cast(input));
+    }
+  }
+}
+
+
+void HGraph::MarkDeoptimizeOnUndefined() {
+  HPhase phase("MarkDeoptimizeOnUndefined", this);
+  // Compute DeoptimizeOnUndefined flag for phis.
+  // Any phi that can reach a use with DeoptimizeOnUndefined set must
+  // have DeoptimizeOnUndefined set.  Currently only HCompare, with
+  // double input representation, has this flag set.
+  // The flag is used by HChange tagged->double, which must deoptimize
+  // if one of its uses has this flag set.
+  // NOTE(review): runs before InsertRepresentationChanges (see the pipeline
+  // hunk in this CL), so HChange reads a fully propagated flag.
+  for (int i = 0; i < phi_list()->length(); i++) {
+    HPhi* phi = phi_list()->at(i);
+    // Only double-representation phis matter: the flag exists solely to
+    // control the tagged->double untagging path.
+    if (phi->representation().IsDouble()) {
+      for (HUseIterator it(phi->uses()); !it.Done(); it.Advance()) {
+        if (it.value()->CheckFlag(HValue::kDeoptimizeOnUndefined)) {
+          // A single flagged use suffices; mark this phi and its transitive
+          // phi inputs, then stop scanning the remaining uses.
+          RecursivelyMarkPhiDeoptimizeOnUndefined(phi);
+          break;
+        }
+      }
+    }
+  }
+}
+
+
void HGraph::ComputeMinusZeroChecks() {
BitVector visited(GetMaximumValueID());
for (int i = 0; i < blocks_.length(); ++i) {
graph()->InitializeInferredTypes();
graph()->Canonicalize();
+ graph()->MarkDeoptimizeOnUndefined();
graph()->InsertRepresentationChanges();
graph()->ComputeMinusZeroChecks();
void InitializeInferredTypes();
void InsertTypeConversions();
void InsertRepresentationChanges();
+ void MarkDeoptimizeOnUndefined();
void ComputeMinusZeroChecks();
bool ProcessArgumentsObject();
void EliminateRedundantPhis();
void InsertTypeConversions(HInstruction* instr);
void PropagateMinusZeroChecks(HValue* value, BitVector* visited);
+ void RecursivelyMarkPhiDeoptimizeOnUndefined(HPhi* phi);
void InsertRepresentationChangeForUse(HValue* value,
HValue* use_value,
int use_index,
void LCodeGen::EmitNumberUntagD(Register input_reg,
XMMRegister result_reg,
+ bool deoptimize_on_undefined,
LEnvironment* env) {
- Label load_smi, heap_number, done;
+ Label load_smi, done;
// Smi check.
__ test(input_reg, Immediate(kSmiTagMask));
// Heap number map check.
__ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
factory()->heap_number_map());
- __ j(equal, &heap_number, Label::kNear);
+ if (deoptimize_on_undefined) {
+ DeoptimizeIf(not_equal, env);
+ } else {
+ Label heap_number;
+ __ j(equal, &heap_number, Label::kNear);
- __ cmp(input_reg, factory()->undefined_value());
- DeoptimizeIf(not_equal, env);
+ __ cmp(input_reg, factory()->undefined_value());
+ DeoptimizeIf(not_equal, env);
- // Convert undefined to NaN.
- ExternalReference nan = ExternalReference::address_of_nan();
- __ movdbl(result_reg, Operand::StaticVariable(nan));
- __ jmp(&done, Label::kNear);
+ // Convert undefined to NaN.
+ ExternalReference nan = ExternalReference::address_of_nan();
+ __ movdbl(result_reg, Operand::StaticVariable(nan));
+ __ jmp(&done, Label::kNear);
+ __ bind(&heap_number);
+ }
// Heap number to XMM conversion.
- __ bind(&heap_number);
__ movdbl(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset));
__ jmp(&done, Label::kNear);
Register input_reg = ToRegister(input);
XMMRegister result_reg = ToDoubleRegister(result);
- EmitNumberUntagD(input_reg, result_reg, instr->environment());
+ EmitNumberUntagD(input_reg, result_reg,
+ instr->hydrogen()->deoptimize_on_undefined(),
+ instr->environment());
}
void EmitGoto(int block, LDeferredCode* deferred_stack_check = NULL);
void EmitBranch(int left_block, int right_block, Condition cc);
void EmitCmpI(LOperand* left, LOperand* right);
- void EmitNumberUntagD(Register input, XMMRegister result, LEnvironment* env);
+ void EmitNumberUntagD(Register input,
+ XMMRegister result,
+ bool deoptimize_on_undefined,
+ LEnvironment* env);
// Emits optimized code for typeof x == "y". Modifies input register.
// Returns the condition on which a final split to
}
DECLARE_CONCRETE_INSTRUCTION(NumberUntagD, "double-untag")
+ DECLARE_HYDROGEN_ACCESSOR(Change);
};
void LCodeGen::EmitNumberUntagD(Register input_reg,
XMMRegister result_reg,
+ bool deoptimize_on_undefined,
LEnvironment* env) {
- Label load_smi, heap_number, done;
+ Label load_smi, done;
// Smi check.
__ JumpIfSmi(input_reg, &load_smi, Label::kNear);
// Heap number map check.
__ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset),
Heap::kHeapNumberMapRootIndex);
- __ j(equal, &heap_number, Label::kNear);
+ if (deoptimize_on_undefined) {
+ DeoptimizeIf(not_equal, env);
+ } else {
+ Label heap_number;
+ __ j(equal, &heap_number, Label::kNear);
- __ CompareRoot(input_reg, Heap::kUndefinedValueRootIndex);
- DeoptimizeIf(not_equal, env);
+ __ CompareRoot(input_reg, Heap::kUndefinedValueRootIndex);
+ DeoptimizeIf(not_equal, env);
- // Convert undefined to NaN. Compute NaN as 0/0.
- __ xorps(result_reg, result_reg);
- __ divsd(result_reg, result_reg);
- __ jmp(&done, Label::kNear);
+ // Convert undefined to NaN. Compute NaN as 0/0.
+ __ xorps(result_reg, result_reg);
+ __ divsd(result_reg, result_reg);
+ __ jmp(&done, Label::kNear);
+ __ bind(&heap_number);
+ }
// Heap number to XMM conversion.
- __ bind(&heap_number);
__ movsd(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset));
__ jmp(&done, Label::kNear);
Register input_reg = ToRegister(input);
XMMRegister result_reg = ToDoubleRegister(result);
- EmitNumberUntagD(input_reg, result_reg, instr->environment());
+ EmitNumberUntagD(input_reg, result_reg,
+ instr->hydrogen()->deoptimize_on_undefined(),
+ instr->environment());
}
void EmitGoto(int block, LDeferredCode* deferred_stack_check = NULL);
void EmitBranch(int left_block, int right_block, Condition cc);
void EmitCmpI(LOperand* left, LOperand* right);
- void EmitNumberUntagD(Register input, XMMRegister result, LEnvironment* env);
+ void EmitNumberUntagD(Register input,
+ XMMRegister result,
+ bool deoptimize_on_undefined,
+ LEnvironment* env);
// Emits optimized code for typeof x == "y". Modifies input register.
// Returns the condition on which a final split to
}
DECLARE_CONCRETE_INSTRUCTION(NumberUntagD, "double-untag")
+ DECLARE_HYDROGEN_ACCESSOR(Change);
};
# All tests in the bug directory are expected to fail.
bugs: FAIL
-# Reliably fails when crankshaft is active, passes with --nocrankshaft
-bugs/bug-1434: FAIL || PASS
-
##############################################################################
# Too slow in debug mode with --stress-opt
compiler/regress-stacktrace-methods: PASS, SKIP if $mode == debug
-// Copyright 2008 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met: