}
+LInstruction* LChunkBuilder::DoCompareHoleAndBranch(
+ HCompareHoleAndBranch* instr) {
+ LOperand* object = UseRegisterAtStart(instr->object());
+ return new(zone()) LCmpHoleAndBranch(object);
+}
+
+
LInstruction* LChunkBuilder::DoIsObjectAndBranch(HIsObjectAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
LOperand* value = UseRegisterAtStart(instr->value());
V(ClassOfTestAndBranch) \
V(CompareNumericAndBranch) \
V(CmpObjectEqAndBranch) \
+ V(CmpHoleAndBranch) \
V(CmpMapAndBranch) \
V(CmpT) \
V(ConstantD) \
LOperand* left() { return inputs_[0]; }
LOperand* right() { return inputs_[1]; }
- DECLARE_CONCRETE_INSTRUCTION(CmpObjectEqAndBranch,
- "cmp-object-eq-and-branch")
+ DECLARE_CONCRETE_INSTRUCTION(CmpObjectEqAndBranch, "cmp-object-eq-and-branch")
DECLARE_HYDROGEN_ACCESSOR(CompareObjectEqAndBranch)
};
+class LCmpHoleAndBranch: public LControlInstruction<1, 0> {
+ public:
+ explicit LCmpHoleAndBranch(LOperand* object) {
+ inputs_[0] = object;
+ }
+
+ LOperand* object() { return inputs_[0]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(CmpHoleAndBranch, "cmp-hole-and-branch")
+ DECLARE_HYDROGEN_ACCESSOR(CompareHoleAndBranch)
+};
+
+
class LIsObjectAndBranch: public LControlInstruction<1, 1> {
public:
LIsObjectAndBranch(LOperand* value, LOperand* temp) {
}
-void LCodeGen::DeoptimizeIf(Condition cc,
+void LCodeGen::DeoptimizeIf(Condition condition,
LEnvironment* environment,
Deoptimizer::BailoutType bailout_type) {
RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
}
if (info()->ShouldTrapOnDeopt()) {
- __ stop("trap_on_deopt", cc);
+ __ stop("trap_on_deopt", condition);
}
ASSERT(info()->IsStub() || frame_is_built_);
- if (cc == al && frame_is_built_) {
+ if (condition == al && frame_is_built_) {
__ Call(entry, RelocInfo::RUNTIME_ENTRY);
} else {
// We often have several deopts to the same entry, reuse the last
!frame_is_built_);
deopt_jump_table_.Add(table_entry, zone());
}
- __ b(cc, &deopt_jump_table_.last().label);
+ __ b(condition, &deopt_jump_table_.last().label);
}
}
-void LCodeGen::DeoptimizeIf(Condition cc,
+void LCodeGen::DeoptimizeIf(Condition condition,
LEnvironment* environment) {
Deoptimizer::BailoutType bailout_type = info()->IsStub()
? Deoptimizer::LAZY
: Deoptimizer::EAGER;
- DeoptimizeIf(cc, environment, bailout_type);
+ DeoptimizeIf(condition, environment, bailout_type);
}
}
template<class InstrType>
-void LCodeGen::EmitBranch(InstrType instr, Condition cc) {
+void LCodeGen::EmitBranch(InstrType instr, Condition condition) {
int left_block = instr->TrueDestination(chunk_);
int right_block = instr->FalseDestination(chunk_);
int next_block = GetNextEmittedBlock();
- if (right_block == left_block || cc == al) {
+ if (right_block == left_block || condition == al) {
EmitGoto(left_block);
} else if (left_block == next_block) {
- __ b(NegateCondition(cc), chunk_->GetAssemblyLabel(right_block));
+ __ b(NegateCondition(condition), chunk_->GetAssemblyLabel(right_block));
} else if (right_block == next_block) {
- __ b(cc, chunk_->GetAssemblyLabel(left_block));
+ __ b(condition, chunk_->GetAssemblyLabel(left_block));
} else {
- __ b(cc, chunk_->GetAssemblyLabel(left_block));
+ __ b(condition, chunk_->GetAssemblyLabel(left_block));
__ b(chunk_->GetAssemblyLabel(right_block));
}
}
+template<class InstrType>
+void LCodeGen::EmitFalseBranch(InstrType instr, Condition condition) {
+ int false_block = instr->FalseDestination(chunk_);
+ __ b(condition, chunk_->GetAssemblyLabel(false_block));
+}
+
+
void LCodeGen::DoDebugBreak(LDebugBreak* instr) {
__ stop("LBreak");
}
}
+void LCodeGen::DoCmpHoleAndBranch(LCmpHoleAndBranch* instr) {
+ if (instr->hydrogen()->representation().IsTagged()) {
+ Register input_reg = ToRegister(instr->object());
+ __ mov(ip, Operand(factory()->the_hole_value()));
+ __ cmp(input_reg, ip);
+ EmitBranch(instr, eq);
+ return;
+ }
+
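+  // In holey double arrays the hole is stored as a NaN with a special
+  // upper word (kHoleNanUpper32). A value that compares ordered with
+  // itself is not a NaN and therefore cannot be the hole.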
+ DwVfpRegister input_reg = ToDoubleRegister(instr->object());
+ __ VFPCompareAndSetFlags(input_reg, input_reg);
+ EmitFalseBranch(instr, vc);
+
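+  // Only the upper word distinguishes the hole NaN from other NaNs.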
+ Register scratch = scratch0();
+ __ VmovHigh(scratch, input_reg);
+ __ cmp(scratch, Operand(kHoleNanUpper32));
+ EmitBranch(instr, eq);
+}
+
+
Condition LCodeGen::EmitIsObject(Register input,
Register temp1,
Label* is_not_object,
}
-void LCodeGen::ApplyCheckIf(Condition cc, LBoundsCheck* check) {
+void LCodeGen::ApplyCheckIf(Condition condition, LBoundsCheck* check) {
if (FLAG_debug_code && check->hydrogen()->skip_check()) {
Label done;
- __ b(NegateCondition(cc), &done);
+ __ b(NegateCondition(condition), &done);
__ stop("eliminated bounds check failed");
__ bind(&done);
} else {
- DeoptimizeIf(cc, check->environment());
+ DeoptimizeIf(condition, check->environment());
}
}
Register temp1 = ToRegister(instr->temp());
Register temp2 = ToRegister(instr->temp2());
- bool convert_hole = false;
- HValue* change_input = instr->hydrogen()->value();
- if (change_input->IsLoadKeyed()) {
- HLoadKeyed* load = HLoadKeyed::cast(change_input);
- convert_hole = load->UsesMustHandleHole();
- }
-
- Label no_special_nan_handling;
- Label done;
- if (convert_hole) {
- DwVfpRegister input_reg = ToDoubleRegister(instr->value());
- __ VFPCompareAndSetFlags(input_reg, input_reg);
- __ b(vc, &no_special_nan_handling);
- __ VmovHigh(scratch, input_reg);
- __ cmp(scratch, Operand(kHoleNanUpper32));
- // If not the hole NaN, force the NaN to be canonical.
- __ VFPCanonicalizeNaN(input_reg, ne);
- __ b(ne, &no_special_nan_handling);
- __ Move(reg, factory()->the_hole_value());
- __ b(&done);
- }
-
- __ bind(&no_special_nan_handling);
DeferredNumberTagD* deferred = new(zone()) DeferredNumberTagD(this, instr);
if (FLAG_inline_new) {
__ LoadRoot(scratch, Heap::kHeapNumberMapRootIndex);
__ vstr(input_reg, reg, HeapNumber::kValueOffset);
// Now that we have finished with the object's real address tag it
__ add(reg, reg, Operand(kHeapObjectTag));
- __ bind(&done);
}
void LCodeGen::EmitNumberUntagD(Register input_reg,
DwVfpRegister result_reg,
- bool allow_undefined_as_nan,
+ bool can_convert_undefined_to_nan,
bool deoptimize_on_minus_zero,
LEnvironment* env,
NumberUntagDMode mode) {
Label load_smi, heap_number, done;
- STATIC_ASSERT(NUMBER_CANDIDATE_IS_ANY_TAGGED_CONVERT_HOLE >
- NUMBER_CANDIDATE_IS_ANY_TAGGED);
- if (mode >= NUMBER_CANDIDATE_IS_ANY_TAGGED) {
+ if (mode == NUMBER_CANDIDATE_IS_ANY_TAGGED) {
// Smi check.
__ UntagAndJumpIfSmi(scratch, input_reg, &load_smi);
__ ldr(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset));
__ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
__ cmp(scratch, Operand(ip));
- if (!allow_undefined_as_nan) {
+ if (!can_convert_undefined_to_nan) {
DeoptimizeIf(ne, env);
} else {
Label heap_number, convert;
// Convert undefined (and hole) to NaN.
__ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
__ cmp(input_reg, Operand(ip));
- if (mode == NUMBER_CANDIDATE_IS_ANY_TAGGED_CONVERT_HOLE) {
- __ b(eq, &convert);
- __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
- __ cmp(input_reg, Operand(ip));
- }
DeoptimizeIf(ne, env);
__ bind(&convert);
Register input_reg = ToRegister(input);
DwVfpRegister result_reg = ToDoubleRegister(result);
- NumberUntagDMode mode = NUMBER_CANDIDATE_IS_ANY_TAGGED;
HValue* value = instr->hydrogen()->value();
- if (value->type().IsSmi()) {
- mode = NUMBER_CANDIDATE_IS_SMI;
- } else if (value->IsLoadKeyed()) {
- HLoadKeyed* load = HLoadKeyed::cast(value);
- if (load->UsesMustHandleHole()) {
- if (load->hole_mode() == ALLOW_RETURN_HOLE) {
- mode = NUMBER_CANDIDATE_IS_ANY_TAGGED_CONVERT_HOLE;
- }
- }
- }
+ NumberUntagDMode mode = value->representation().IsSmi()
+ ? NUMBER_CANDIDATE_IS_SMI : NUMBER_CANDIDATE_IS_ANY_TAGGED;
EmitNumberUntagD(input_reg, result_reg,
- instr->hydrogen()->allow_undefined_as_nan(),
+ instr->hydrogen()->can_convert_undefined_to_nan(),
instr->hydrogen()->deoptimize_on_minus_zero(),
instr->environment(),
mode);
void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
Safepoint::DeoptMode mode);
- void DeoptimizeIf(Condition cc,
+ void DeoptimizeIf(Condition condition,
LEnvironment* environment,
Deoptimizer::BailoutType bailout_type);
- void DeoptimizeIf(Condition cc, LEnvironment* environment);
- void ApplyCheckIf(Condition cc, LBoundsCheck* check);
+ void DeoptimizeIf(Condition condition, LEnvironment* environment);
+ void ApplyCheckIf(Condition condition, LBoundsCheck* check);
void AddToTranslation(LEnvironment* environment,
Translation* translation,
static Condition TokenToCondition(Token::Value op, bool is_unsigned);
void EmitGoto(int block);
template<class InstrType>
- void EmitBranch(InstrType instr, Condition cc);
+ void EmitBranch(InstrType instr, Condition condition);
+ template<class InstrType>
+ void EmitFalseBranch(InstrType instr, Condition condition);
void EmitNumberUntagD(Register input,
DwVfpRegister result,
-                        bool allow_undefined_as_nan,
+                        bool can_convert_undefined_to_nan,
}
-bool HValue::CheckUsesForFlag(Flag f) {
+bool HValue::CheckUsesForFlag(Flag f) const {
for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
if (it.value()->IsSimulate()) continue;
if (!it.value()->CheckFlag(f)) return false;
}
-bool HValue::HasAtLeastOneUseWithFlagAndNoneWithout(Flag f) {
+bool HValue::HasAtLeastOneUseWithFlagAndNoneWithout(Flag f) const {
bool return_value = false;
for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
if (it.value()->IsSimulate()) continue;
if (val->representation().IsSmiOrInteger32()) {
if (!val->representation().Equals(representation())) {
HChange* result = new(block()->zone()) HChange(
- val, representation(), false, false, false);
+ val, representation(), false, false);
result->InsertBefore(this);
return result;
}
if (new_left == NULL &&
hdiv->observed_input_representation(1).IsSmiOrInteger32()) {
new_left = new(block()->zone()) HChange(
- left, Representation::Integer32(), false, false, false);
+ left, Representation::Integer32(), false, false);
HChange::cast(new_left)->InsertBefore(this);
}
HValue* new_right =
#endif
hdiv->observed_input_representation(2).IsSmiOrInteger32()) {
new_right = new(block()->zone()) HChange(
- right, Representation::Integer32(), false, false, false);
+ right, Representation::Integer32(), false, false);
HChange::cast(new_right)->InsertBefore(this);
}
}
+void HCompareHoleAndBranch::PrintDataTo(StringStream* stream) {
+ object()->PrintNameTo(stream);
+ HControlInstruction::PrintDataTo(stream);
+}
+
+
+void HCompareHoleAndBranch::InferRepresentation(
+ HInferRepresentationPhase* h_infer) {
+ ChangeRepresentation(object()->representation());
+}
+
+
void HGoto::PrintDataTo(StringStream* stream) {
stream->Add("B%d", SuccessorAt(0)->block_id());
}
bool HLoadKeyed::AllUsesCanTreatHoleAsNaN() const {
- if (!IsFastDoubleElementsKind(elements_kind())) {
- return false;
- }
-
- for (HUseIterator it(uses()); !it.Done(); it.Advance()) {
- HValue* use = it.value();
- if (!use->CheckFlag(HValue::kAllowUndefinedAsNaN)) {
- return false;
- }
- }
-
- return true;
+ return IsFastDoubleElementsKind(elements_kind()) &&
+ CheckUsesForFlag(HValue::kAllowUndefinedAsNaN);
}
V(ClampToUint8) \
V(ClassOfTestAndBranch) \
V(CompareNumericAndBranch) \
+ V(CompareHoleAndBranch) \
V(CompareGeneric) \
V(CompareObjectEqAndBranch) \
V(CompareMap) \
bool CheckFlag(Flag f) const { return (flags_ & (1 << f)) != 0; }
// Returns true if the flag specified is set for all uses, false otherwise.
- bool CheckUsesForFlag(Flag f);
+ bool CheckUsesForFlag(Flag f) const;
// Returns true if the flag specified is set for all uses, and this set
// of uses is non-empty.
- bool HasAtLeastOneUseWithFlagAndNoneWithout(Flag f);
+ bool HasAtLeastOneUseWithFlagAndNoneWithout(Flag f) const;
GVNFlagSet gvn_flags() const { return gvn_flags_; }
void SetGVNFlag(GVNFlag f) { gvn_flags_.Add(f); }
HChange(HValue* value,
Representation to,
bool is_truncating_to_smi,
- bool is_truncating_to_int32,
- bool allow_undefined_as_nan)
+ bool is_truncating_to_int32)
: HUnaryOperation(value) {
ASSERT(!value->representation().IsNone());
ASSERT(!to.IsNone());
ASSERT(!value->representation().Equals(to));
set_representation(to);
SetFlag(kUseGVN);
- if (allow_undefined_as_nan) SetFlag(kAllowUndefinedAsNaN);
if (is_truncating_to_smi) SetFlag(kTruncatingToSmi);
if (is_truncating_to_int32) SetFlag(kTruncatingToInt32);
if (value->representation().IsSmi() || value->type().IsSmi()) {
}
}
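+  // True when every use of this change treats undefined and NaN alike,
+  // so untagging may substitute NaN for undefined instead of deoptimizing.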
+ bool can_convert_undefined_to_nan() {
+ return CheckUsesForFlag(kAllowUndefinedAsNaN);
+ }
+
virtual HValue* EnsureAndPropagateNotMinusZero(BitVector* visited);
virtual HType CalculateInferredType();
virtual HValue* Canonicalize();
Representation from() const { return value()->representation(); }
Representation to() const { return representation(); }
- bool allow_undefined_as_nan() const {
- return CheckFlag(kAllowUndefinedAsNaN);
- }
bool deoptimize_on_minus_zero() const {
return CheckFlag(kBailoutOnMinusZero);
}
};
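+// Branches to the true target when its input is the hole: the tagged hole
+// value, or, for double inputs, the hole NaN bit pattern.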
+class HCompareHoleAndBranch: public HTemplateControlInstruction<2, 1> {
+ public:
+ // TODO(danno): make this private when the IfBuilder properly constructs
+ // control flow instructions.
+ explicit HCompareHoleAndBranch(HValue* object) {
+ SetFlag(kFlexibleRepresentation);
+ SetFlag(kAllowUndefinedAsNaN);
+ SetOperandAt(0, object);
+ }
+
+ DECLARE_INSTRUCTION_FACTORY_P1(HCompareHoleAndBranch, HValue*);
+
+ HValue* object() { return OperandAt(0); }
+
+ virtual void InferRepresentation(HInferRepresentationPhase* h_infer);
+
+ virtual Representation RequiredInputRepresentation(int index) {
+ return representation();
+ }
+
+ virtual void PrintDataTo(StringStream* stream);
+
+ DECLARE_CONCRETE_INSTRUCTION(CompareHoleAndBranch)
+};
+
+
class HCompareObjectEqAndBranch: public HTemplateControlInstruction<2, 2> {
public:
// TODO(danno): make this private when the IfBuilder properly constructs
}
}
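+  // A tagged add may be a string concatenation, where undefined must
+  // become the string "undefined" rather than NaN.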
+ virtual void RepresentationChanged(Representation to) {
+ if (to.IsTagged()) ClearFlag(kAllowUndefinedAsNaN);
+ HArithmeticBinaryOperation::RepresentationChanged(to);
+ }
+
DECLARE_CONCRETE_INSTRUCTION(Add)
protected:
const ZoneList<HPhi*>* phi_list = graph()->phi_list();
for (int i = 0; i < phi_list->length(); i++) {
HPhi* phi = phi_list->at(i);
- if (phi->CheckFlag(HValue::kAllowUndefinedAsNaN)) {
- for (HUseIterator it(phi->uses()); !it.Done(); it.Advance()) {
- HValue* use_value = it.value();
- if (!use_value->CheckFlag(HValue::kAllowUndefinedAsNaN)) {
- ProcessPhi(phi);
- break;
- }
- }
+ if (phi->CheckFlag(HValue::kAllowUndefinedAsNaN) &&
+ !phi->CheckUsesForFlag(HValue::kAllowUndefinedAsNaN)) {
+ ProcessPhi(phi);
}
}
}
}
}
+
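+// Mark each HChange whose uses can all treat undefined as NaN, so that
+// untagging may convert undefined to NaN instead of deoptimizing.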
+void HComputeChangeUndefinedToNaN::Run() {
+ const ZoneList<HBasicBlock*>* blocks(graph()->blocks());
+ for (int i = 0; i < blocks->length(); ++i) {
+ const HBasicBlock* block(blocks->at(i));
+ for (HInstruction* current = block->first(); current != NULL; ) {
+ HInstruction* next = current->next();
+ if (current->IsChange()) {
+ if (HChange::cast(current)->can_convert_undefined_to_nan()) {
+ current->SetFlag(HValue::kAllowUndefinedAsNaN);
+ }
+ }
+ current = next;
+ }
+ }
+}
+
+
} } // namespace v8::internal
};
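+// Phase that decides, for every HChange in the graph, whether undefined
+// may be converted to NaN based on the flags of its uses.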
+class HComputeChangeUndefinedToNaN : public HPhase {
+ public:
+ explicit HComputeChangeUndefinedToNaN(HGraph* graph)
+ : HPhase("H_Compute change undefined to nan", graph) {}
+
+ void Run();
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(HComputeChangeUndefinedToNaN);
+};
+
+
} } // namespace v8::internal
#endif // V8_HYDROGEN_MARK_DEOPTIMIZE_H_
HInstruction* new_value = NULL;
bool is_truncating_to_smi = use_value->CheckFlag(HValue::kTruncatingToSmi);
bool is_truncating_to_int = use_value->CheckFlag(HValue::kTruncatingToInt32);
- bool allow_undefined_as_nan =
- use_value->CheckFlag(HValue::kAllowUndefinedAsNaN);
if (value->IsConstant()) {
HConstant* constant = HConstant::cast(value);
// Try to create a new copy of the constant with the new representation.
}
if (new_value == NULL) {
- new_value = new(graph()->zone()) HChange(value, to,
- is_truncating_to_smi,
- is_truncating_to_int,
- allow_undefined_as_nan);
+ new_value = new(graph()->zone()) HChange(
+ value, to, is_truncating_to_smi, is_truncating_to_int);
}
new_value->InsertBefore(next);
from_elements_kind,
ALLOW_RETURN_HOLE);
- ElementsKind holey_kind = IsFastSmiElementsKind(to_elements_kind)
+ ElementsKind kind = (IsHoleyElementsKind(from_elements_kind) &&
+ IsFastSmiElementsKind(to_elements_kind))
? FAST_HOLEY_ELEMENTS : to_elements_kind;
- HInstruction* holey_store = Add<HStoreKeyed>(to_elements, key,
- element, holey_kind);
- // Allow NaN hole values to converted to their tagged counterparts.
- if (IsFastHoleyElementsKind(to_elements_kind)) {
- holey_store->SetFlag(HValue::kAllowUndefinedAsNaN);
+
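+  // When source and destination encode the hole differently, store it
+  // explicitly: as the hole NaN for double destinations, or as the tagged
+  // hole value otherwise.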
+ if (IsHoleyElementsKind(from_elements_kind) &&
+ from_elements_kind != to_elements_kind) {
+ IfBuilder if_hole(this);
+ if_hole.If<HCompareHoleAndBranch>(element);
+ if_hole.Then();
+ HConstant* hole_constant = IsFastDoubleElementsKind(to_elements_kind)
+ ? Add<HConstant>(FixedDoubleArray::hole_nan_as_double())
+ : graph()->GetConstantHole();
+ Add<HStoreKeyed>(to_elements, key, hole_constant, kind);
+ if_hole.Else();
+ HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind);
+ store->SetFlag(HValue::kAllowUndefinedAsNaN);
+ if_hole.End();
+ } else {
+ HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind);
+ store->SetFlag(HValue::kAllowUndefinedAsNaN);
}
builder.EndBody();
if (FLAG_use_range) Run<HRangeAnalysisPhase>();
+ Run<HComputeChangeUndefinedToNaN>();
Run<HComputeMinusZeroChecksPhase>();
// Eliminate redundant stack checks on backwards branches.
}
+template<class InstrType>
+void LCodeGen::EmitFalseBranch(InstrType instr, Condition cc) {
+ int false_block = instr->FalseDestination(chunk_);
+ __ j(cc, chunk_->GetAssemblyLabel(false_block));
+}
+
+
void LCodeGen::DoIsNumberAndBranch(LIsNumberAndBranch* instr) {
Representation r = instr->hydrogen()->value()->representation();
if (r.IsSmiOrInteger32() || r.IsDouble()) {
}
+void LCodeGen::DoCmpHoleAndBranch(LCmpHoleAndBranch* instr) {
+ if (instr->hydrogen()->representation().IsTagged()) {
+ Register input_reg = ToRegister(instr->object());
+ __ cmp(input_reg, factory()->the_hole_value());
+ EmitBranch(instr, equal);
+ return;
+ }
+
+ bool use_sse2 = CpuFeatures::IsSupported(SSE2);
+ if (use_sse2) {
+ CpuFeatureScope scope(masm(), SSE2);
+ XMMRegister input_reg = ToDoubleRegister(instr->object());
+ __ ucomisd(input_reg, input_reg);
+ } else {
+    // Put the value on top of the x87 stack.
+ X87Register src = ToX87Register(instr->object());
+ X87LoadForUsage(src);
+ __ fld(0);
+ __ fld(0);
+ __ FCmp();
+ }
+
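+  // ucomisd/FCmp set the parity flag on an unordered (NaN) comparison;
+  // a value that is not NaN cannot be the hole.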
+ EmitFalseBranch(instr, parity_odd);
+
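+  // Spill the double so its upper 32 bits can be compared against the
+  // hole NaN pattern.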
+ __ sub(esp, Immediate(kDoubleSize));
+ if (use_sse2) {
+ CpuFeatureScope scope(masm(), SSE2);
+ XMMRegister input_reg = ToDoubleRegister(instr->object());
+ __ movdbl(MemOperand(esp, 0), input_reg);
+ } else {
+ __ fld(0);
+ __ fstp_d(MemOperand(esp, 0));
+ }
+
+ __ add(esp, Immediate(kDoubleSize));
+  int offset = sizeof(kHoleNanUpper32);
+  __ cmp(MemOperand(esp, -offset), Immediate(kHoleNanUpper32));
+ EmitBranch(instr, equal);
+}
+
+
Condition LCodeGen::EmitIsObject(Register input,
Register temp1,
Label* is_not_object,
Register reg = ToRegister(instr->result());
- bool convert_hole = false;
- HValue* change_input = instr->hydrogen()->value();
- if (change_input->IsLoadKeyed()) {
- HLoadKeyed* load = HLoadKeyed::cast(change_input);
- convert_hole = load->UsesMustHandleHole();
- }
-
bool use_sse2 = CpuFeatures::IsSupported(SSE2);
if (!use_sse2) {
// Put the value to the top of stack
X87LoadForUsage(src);
}
- Label no_special_nan_handling;
- Label done;
- if (convert_hole) {
- if (use_sse2) {
- CpuFeatureScope scope(masm(), SSE2);
- XMMRegister input_reg = ToDoubleRegister(instr->value());
- __ ucomisd(input_reg, input_reg);
- } else {
- __ fld(0);
- __ fld(0);
- __ FCmp();
- }
-
- __ j(parity_odd, &no_special_nan_handling);
- __ sub(esp, Immediate(kDoubleSize));
- if (use_sse2) {
- CpuFeatureScope scope(masm(), SSE2);
- XMMRegister input_reg = ToDoubleRegister(instr->value());
- __ movdbl(MemOperand(esp, 0), input_reg);
- } else {
- __ fld(0);
- __ fstp_d(MemOperand(esp, 0));
- }
- __ cmp(MemOperand(esp, sizeof(kHoleNanLower32)),
- Immediate(kHoleNanUpper32));
- Label canonicalize;
- __ j(not_equal, &canonicalize);
- __ add(esp, Immediate(kDoubleSize));
- __ mov(reg, factory()->the_hole_value());
- if (!use_sse2) {
- __ fstp(0);
- }
- __ jmp(&done);
- __ bind(&canonicalize);
- __ add(esp, Immediate(kDoubleSize));
- ExternalReference nan =
- ExternalReference::address_of_canonical_non_hole_nan();
- if (use_sse2) {
- CpuFeatureScope scope(masm(), SSE2);
- XMMRegister input_reg = ToDoubleRegister(instr->value());
- __ movdbl(input_reg, Operand::StaticVariable(nan));
- } else {
- __ fstp(0);
- __ fld_d(Operand::StaticVariable(nan));
- }
- }
-
- __ bind(&no_special_nan_handling);
DeferredNumberTagD* deferred = new(zone()) DeferredNumberTagD(this, instr);
if (FLAG_inline_new) {
Register tmp = ToRegister(instr->temp());
} else {
__ fstp_d(FieldOperand(reg, HeapNumber::kValueOffset));
}
- __ bind(&done);
}
void LCodeGen::EmitNumberUntagDNoSSE2(Register input_reg,
Register temp_reg,
X87Register res_reg,
- bool allow_undefined_as_nan,
+ bool can_convert_undefined_to_nan,
bool deoptimize_on_minus_zero,
LEnvironment* env,
NumberUntagDMode mode) {
Label load_smi, done;
X87PrepareToWrite(res_reg);
- STATIC_ASSERT(NUMBER_CANDIDATE_IS_ANY_TAGGED_CONVERT_HOLE >
- NUMBER_CANDIDATE_IS_ANY_TAGGED);
- if (mode >= NUMBER_CANDIDATE_IS_ANY_TAGGED) {
+ if (mode == NUMBER_CANDIDATE_IS_ANY_TAGGED) {
// Smi check.
__ JumpIfSmi(input_reg, &load_smi, Label::kNear);
// Heap number map check.
__ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
factory()->heap_number_map());
- if (!allow_undefined_as_nan) {
+ if (!can_convert_undefined_to_nan) {
DeoptimizeIf(not_equal, env);
} else {
Label heap_number, convert;
// Convert undefined (or hole) to NaN.
__ cmp(input_reg, factory()->undefined_value());
- if (mode == NUMBER_CANDIDATE_IS_ANY_TAGGED_CONVERT_HOLE) {
- __ j(equal, &convert, Label::kNear);
- __ cmp(input_reg, factory()->the_hole_value());
- }
DeoptimizeIf(not_equal, env);
__ bind(&convert);
void LCodeGen::EmitNumberUntagD(Register input_reg,
Register temp_reg,
XMMRegister result_reg,
- bool allow_undefined_as_nan,
+ bool can_convert_undefined_to_nan,
bool deoptimize_on_minus_zero,
LEnvironment* env,
NumberUntagDMode mode) {
Label load_smi, done;
- STATIC_ASSERT(NUMBER_CANDIDATE_IS_ANY_TAGGED_CONVERT_HOLE >
- NUMBER_CANDIDATE_IS_ANY_TAGGED);
- if (mode >= NUMBER_CANDIDATE_IS_ANY_TAGGED) {
+ if (mode == NUMBER_CANDIDATE_IS_ANY_TAGGED) {
// Smi check.
__ JumpIfSmi(input_reg, &load_smi, Label::kNear);
// Heap number map check.
__ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
factory()->heap_number_map());
- if (!allow_undefined_as_nan) {
+ if (!can_convert_undefined_to_nan) {
DeoptimizeIf(not_equal, env);
} else {
Label heap_number, convert;
// Convert undefined (and hole) to NaN.
__ cmp(input_reg, factory()->undefined_value());
- if (mode == NUMBER_CANDIDATE_IS_ANY_TAGGED_CONVERT_HOLE) {
- __ j(equal, &convert, Label::kNear);
- __ cmp(input_reg, factory()->the_hole_value());
- }
DeoptimizeIf(not_equal, env);
__ bind(&convert);
instr->hydrogen()->deoptimize_on_minus_zero();
Register temp_reg = deoptimize_on_minus_zero ? ToRegister(temp) : no_reg;
- NumberUntagDMode mode = NUMBER_CANDIDATE_IS_ANY_TAGGED;
HValue* value = instr->hydrogen()->value();
- if (value->representation().IsSmi()) {
- mode = NUMBER_CANDIDATE_IS_SMI;
- } else if (value->IsLoadKeyed()) {
- HLoadKeyed* load = HLoadKeyed::cast(value);
- if (load->UsesMustHandleHole()) {
- mode = NUMBER_CANDIDATE_IS_ANY_TAGGED_CONVERT_HOLE;
- }
- }
+ NumberUntagDMode mode = value->representation().IsSmi()
+ ? NUMBER_CANDIDATE_IS_SMI : NUMBER_CANDIDATE_IS_ANY_TAGGED;
if (CpuFeatures::IsSupported(SSE2)) {
CpuFeatureScope scope(masm(), SSE2);
EmitNumberUntagD(input_reg,
temp_reg,
result_reg,
- instr->hydrogen()->allow_undefined_as_nan(),
+ instr->hydrogen()->can_convert_undefined_to_nan(),
deoptimize_on_minus_zero,
instr->environment(),
mode);
EmitNumberUntagDNoSSE2(input_reg,
temp_reg,
ToX87Register(instr->result()),
- instr->hydrogen()->allow_undefined_as_nan(),
+ instr->hydrogen()->can_convert_undefined_to_nan(),
deoptimize_on_minus_zero,
instr->environment(),
mode);
void EmitGoto(int block);
template<class InstrType>
void EmitBranch(InstrType instr, Condition cc);
+ template<class InstrType>
+ void EmitFalseBranch(InstrType instr, Condition cc);
void EmitNumberUntagD(
Register input,
Register temp,
}
+LInstruction* LChunkBuilder::DoCompareHoleAndBranch(
+ HCompareHoleAndBranch* instr) {
+ LOperand* object = UseRegisterAtStart(instr->object());
+ return new(zone()) LCmpHoleAndBranch(object);
+}
+
+
LInstruction* LChunkBuilder::DoIsObjectAndBranch(HIsObjectAndBranch* instr) {
ASSERT(instr->value()->representation().IsSmiOrTagged());
LOperand* temp = TempRegister();
V(ClassOfTestAndBranch) \
V(CompareNumericAndBranch) \
V(CmpObjectEqAndBranch) \
+ V(CmpHoleAndBranch) \
V(CmpMapAndBranch) \
V(CmpT) \
V(ConstantD) \
LOperand* left() { return inputs_[0]; }
LOperand* right() { return inputs_[1]; }
- DECLARE_CONCRETE_INSTRUCTION(CmpObjectEqAndBranch,
- "cmp-object-eq-and-branch")
+ DECLARE_CONCRETE_INSTRUCTION(CmpObjectEqAndBranch, "cmp-object-eq-and-branch")
+};
+
+
+class LCmpHoleAndBranch: public LControlInstruction<1, 0> {
+ public:
+ explicit LCmpHoleAndBranch(LOperand* object) {
+ inputs_[0] = object;
+ }
+
+ LOperand* object() { return inputs_[0]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(CmpHoleAndBranch, "cmp-hole-and-branch")
+ DECLARE_HYDROGEN_ACCESSOR(CompareHoleAndBranch)
};
enum NumberUntagDMode {
NUMBER_CANDIDATE_IS_SMI,
- NUMBER_CANDIDATE_IS_ANY_TAGGED,
- NUMBER_CANDIDATE_IS_ANY_TAGGED_CONVERT_HOLE
+ NUMBER_CANDIDATE_IS_ANY_TAGGED
};
__ Move(reg, scratch0(), input_reg);
Label canonicalize;
__ Branch(&canonicalize, ne, scratch0(), Operand(kHoleNanUpper32));
- __ li(reg, factory()->the_hole_value());
+ __ li(reg, factory()->undefined_value());
__ Branch(&done);
__ bind(&canonicalize);
__ Move(input_reg,
}
+template<class InstrType>
+void LCodeGen::EmitFalseBranch(InstrType instr, Condition cc) {
+ int false_block = instr->FalseDestination(chunk_);
+ __ j(cc, chunk_->GetAssemblyLabel(false_block));
+}
+
+
void LCodeGen::DoDebugBreak(LDebugBreak* instr) {
__ int3();
}
}
+void LCodeGen::DoCmpHoleAndBranch(LCmpHoleAndBranch* instr) {
+ if (instr->hydrogen()->representation().IsTagged()) {
+ Register input_reg = ToRegister(instr->object());
+ __ Cmp(input_reg, factory()->the_hole_value());
+ EmitBranch(instr, equal);
+ return;
+ }
+
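+  // A self-compare is unordered (sets parity) only for NaN, and only a
+  // NaN can carry the hole bit pattern.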
+ XMMRegister input_reg = ToDoubleRegister(instr->object());
+ __ ucomisd(input_reg, input_reg);
+ EmitFalseBranch(instr, parity_odd);
+
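+  // Spill the double so its upper 32 bits can be compared against the
+  // hole NaN pattern.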
+ __ subq(rsp, Immediate(kDoubleSize));
+ __ movsd(MemOperand(rsp, 0), input_reg);
+ __ addq(rsp, Immediate(kDoubleSize));
+
+ int size = sizeof(kHoleNanUpper32);
+ __ cmpl(MemOperand(rsp, -size),
+ Immediate(kHoleNanUpper32));
+ EmitBranch(instr, equal);
+}
+
+
Condition LCodeGen::EmitIsObject(Register input,
Label* is_not_object,
Label* is_object) {
Register reg = ToRegister(instr->result());
Register tmp = ToRegister(instr->temp());
- bool convert_hole = false;
- HValue* change_input = instr->hydrogen()->value();
- if (change_input->IsLoadKeyed()) {
- HLoadKeyed* load = HLoadKeyed::cast(change_input);
- convert_hole = load->UsesMustHandleHole();
- }
-
- Label no_special_nan_handling;
- Label done;
- if (convert_hole) {
- XMMRegister input_reg = ToDoubleRegister(instr->value());
- __ ucomisd(input_reg, input_reg);
- __ j(parity_odd, &no_special_nan_handling);
- __ subq(rsp, Immediate(kDoubleSize));
- __ movsd(MemOperand(rsp, 0), input_reg);
- __ cmpl(MemOperand(rsp, sizeof(kHoleNanLower32)),
- Immediate(kHoleNanUpper32));
- Label canonicalize;
- __ j(not_equal, &canonicalize);
- __ addq(rsp, Immediate(kDoubleSize));
- __ Move(reg, factory()->the_hole_value());
- __ jmp(&done);
- __ bind(&canonicalize);
- __ addq(rsp, Immediate(kDoubleSize));
- __ Set(kScratchRegister, BitCast<uint64_t>(
- FixedDoubleArray::canonical_not_the_hole_nan_as_double()));
- __ movq(input_reg, kScratchRegister);
- }
-
- __ bind(&no_special_nan_handling);
DeferredNumberTagD* deferred = new(zone()) DeferredNumberTagD(this, instr);
if (FLAG_inline_new) {
__ AllocateHeapNumber(reg, tmp, deferred->entry());
}
__ bind(deferred->exit());
__ movsd(FieldOperand(reg, HeapNumber::kValueOffset), input_reg);
-
- __ bind(&done);
}
void LCodeGen::EmitNumberUntagD(Register input_reg,
XMMRegister result_reg,
- bool allow_undefined_as_nan,
+ bool can_convert_undefined_to_nan,
bool deoptimize_on_minus_zero,
LEnvironment* env,
NumberUntagDMode mode) {
Label load_smi, done;
- STATIC_ASSERT(NUMBER_CANDIDATE_IS_ANY_TAGGED_CONVERT_HOLE >
- NUMBER_CANDIDATE_IS_ANY_TAGGED);
- if (mode >= NUMBER_CANDIDATE_IS_ANY_TAGGED) {
+ if (mode == NUMBER_CANDIDATE_IS_ANY_TAGGED) {
// Smi check.
__ JumpIfSmi(input_reg, &load_smi, Label::kNear);
// Heap number map check.
__ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset),
Heap::kHeapNumberMapRootIndex);
- if (!allow_undefined_as_nan) {
+ if (!can_convert_undefined_to_nan) {
DeoptimizeIf(not_equal, env);
} else {
Label heap_number, convert;
// Convert undefined (and hole) to NaN. Compute NaN as 0/0.
__ CompareRoot(input_reg, Heap::kUndefinedValueRootIndex);
- if (mode == NUMBER_CANDIDATE_IS_ANY_TAGGED_CONVERT_HOLE) {
- __ j(equal, &convert, Label::kNear);
- __ CompareRoot(input_reg, Heap::kTheHoleValueRootIndex);
- }
DeoptimizeIf(not_equal, env);
__ bind(&convert);
Register input_reg = ToRegister(input);
XMMRegister result_reg = ToDoubleRegister(result);
- NumberUntagDMode mode = NUMBER_CANDIDATE_IS_ANY_TAGGED;
HValue* value = instr->hydrogen()->value();
- if (value->type().IsSmi()) {
- mode = NUMBER_CANDIDATE_IS_SMI;
- } else if (value->IsLoadKeyed()) {
- HLoadKeyed* load = HLoadKeyed::cast(value);
- if (load->UsesMustHandleHole()) {
- mode = NUMBER_CANDIDATE_IS_ANY_TAGGED_CONVERT_HOLE;
- }
- }
+ NumberUntagDMode mode = value->representation().IsSmi()
+ ? NUMBER_CANDIDATE_IS_SMI : NUMBER_CANDIDATE_IS_ANY_TAGGED;
EmitNumberUntagD(input_reg, result_reg,
- instr->hydrogen()->allow_undefined_as_nan(),
+ instr->hydrogen()->can_convert_undefined_to_nan(),
instr->hydrogen()->deoptimize_on_minus_zero(),
instr->environment(),
mode);
void EmitGoto(int block);
template<class InstrType>
void EmitBranch(InstrType instr, Condition cc);
+ template<class InstrType>
+ void EmitFalseBranch(InstrType instr, Condition cc);
void EmitNumberUntagD(
Register input,
XMMRegister result,
}
+LInstruction* LChunkBuilder::DoCompareHoleAndBranch(
+ HCompareHoleAndBranch* instr) {
+ LOperand* object = UseRegisterAtStart(instr->object());
+ return new(zone()) LCmpHoleAndBranch(object);
+}
+
+
LInstruction* LChunkBuilder::DoIsObjectAndBranch(HIsObjectAndBranch* instr) {
ASSERT(instr->value()->representation().IsTagged());
return new(zone()) LIsObjectAndBranch(UseRegisterAtStart(instr->value()));
V(ClassOfTestAndBranch) \
V(CompareNumericAndBranch) \
V(CmpObjectEqAndBranch) \
+ V(CmpHoleAndBranch) \
V(CmpMapAndBranch) \
V(CmpT) \
V(ConstantD) \
LOperand* left() { return inputs_[0]; }
LOperand* right() { return inputs_[1]; }
- DECLARE_CONCRETE_INSTRUCTION(CmpObjectEqAndBranch,
- "cmp-object-eq-and-branch")
+ DECLARE_CONCRETE_INSTRUCTION(CmpObjectEqAndBranch, "cmp-object-eq-and-branch")
+};
+
+
+class LCmpHoleAndBranch: public LControlInstruction<1, 0> {
+ public:
+ explicit LCmpHoleAndBranch(LOperand* object) {
+ inputs_[0] = object;
+ }
+
+ LOperand* object() { return inputs_[0]; }
+
+ DECLARE_CONCRETE_INSTRUCTION(CmpHoleAndBranch, "cmp-hole-and-branch")
+ DECLARE_HYDROGEN_ACCESSOR(CompareHoleAndBranch)
};
--- /dev/null
+// Copyright 2013 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax --notrack-allocation-sites
+
+// Test that a hole loaded from a holey double array converts to undefined
+// when added to a string.
+var a = [1.5, , 1.8];
+
+function f(a, i, l) {
+ var v = a[i];
+ return l + v;
+}
+
+assertEquals("test1.5", f(a, 0, "test"));
+assertEquals("test1.5", f(a, 0, "test"));
+%OptimizeFunctionOnNextCall(f);
+assertEquals("testundefined", f(a, 1, "test"));
+
+// Test double-hole going through a phi to a string-add.
+function f2(b, a1, a2) {
+ var v;
+ if (b) {
+ v = a1[0];
+ } else {
+ v = a2[0];
+ }
+ x = v * 2;
+ return "test" + v + x;
+}
+
+f2(true, [1.4,1.8,,1.9], [1.4,1.8,,1.9]);
+f2(true, [1.4,1.8,,1.9], [1.4,1.8,,1.9]);
+f2(false, [1.4,1.8,,1.9], [1.4,1.8,,1.9]);
+f2(false, [1.4,1.8,,1.9], [1.4,1.8,,1.9]);
+%OptimizeFunctionOnNextCall(f2);
+assertEquals("testundefinedNaN", f2(false, [,1.8,,1.9], [,1.9,,1.9]));
+
+// Test converting smi-hole to double-hole.
+function t_smi(a) {
+ a[0] = 1.5;
+}
+
+t_smi([1,,3]);
+t_smi([1,,3]);
+t_smi([1,,3]);
+%OptimizeFunctionOnNextCall(t_smi);
+var ta = [1,,3];
+t_smi(ta);
+ta.__proto__ = [6,6,6];
+assertEquals([1.5,6,3], ta);
+
+// Test converting double-hole to tagged-hole.
+function t(b) {
+ b[1] = {};
+}
+
+t([1.4, 1.6,,1.8, NaN]);
+t([1.4, 1.6,,1.8, NaN]);
+%OptimizeFunctionOnNextCall(t);
+var a = [1.6, 1.8,,1.9, NaN];
+t(a);
+a.__proto__ = [6,6,6,6,6];
+assertEquals([1.6, {}, 6, 1.9, NaN], a);