void CheckOddball(MacroAssembler* masm,
Type type,
- Handle<Object> value,
+ Heap::RootListIndex value,
bool result,
Label* patch);
void GenerateTypeTransition(MacroAssembler* masm);
}
// undefined -> false
- CheckOddball(masm, UNDEFINED, factory->undefined_value(), false, &patch);
+ CheckOddball(masm, UNDEFINED, Heap::kUndefinedValueRootIndex, false, &patch);
// Boolean -> its value
- CheckOddball(masm, BOOLEAN, factory->false_value(), false, &patch);
- CheckOddball(masm, BOOLEAN, factory->true_value(), true, &patch);
+ CheckOddball(masm, BOOLEAN, Heap::kFalseValueRootIndex, false, &patch);
+ CheckOddball(masm, BOOLEAN, Heap::kTrueValueRootIndex, true, &patch);
- // 'null' -> false.
- CheckOddball(masm, NULL_TYPE, factory->null_value(), false, &patch);
+ // 'null' -> false.
+ CheckOddball(masm, NULL_TYPE, Heap::kNullValueRootIndex, false, &patch);
if (types_.Contains(SMI)) {
// Smis: 0 -> false, all other -> true
void ToBooleanStub::CheckOddball(MacroAssembler* masm,
Type type,
- Handle<Object> value,
+ Heap::RootListIndex value,
bool result,
Label* patch) {
const Register argument = eax;
if (types_.Contains(type)) {
// If we see an expected oddball, return its ToBoolean value tos_.
Label different_value;
- __ cmp(argument, value);
+ __ CompareRoot(argument, value);
__ j(not_equal, &different_value, Label::kNear);
__ Set(tos_, Immediate(result ? 1 : 0));
__ ret(1 * kPointerSize);
} else if (types_.Contains(INTERNAL_OBJECT)) {
// If we see an unexpected oddball and handle internal objects, we must
// patch because the code for internal objects doesn't handle it explicitly.
- __ cmp(argument, value);
+ __ CompareRoot(argument, value);
__ j(equal, patch);
}
}
} else {
ASSERT(r.IsTagged());
Register reg = ToRegister(instr->InputAt(0));
- if (instr->hydrogen()->value()->type().IsBoolean()) {
+ HType type = instr->hydrogen()->value()->type();
+ if (type.IsBoolean()) {
__ cmp(reg, factory()->true_value());
EmitBranch(true_block, false_block, equal);
+ } else if (type.IsSmi()) {
+ __ test(reg, Operand(reg));
+ EmitBranch(true_block, false_block, not_equal);
} else {
Label* true_label = chunk_->GetAssemblyLabel(true_block);
Label* false_label = chunk_->GetAssemblyLabel(false_block);
}
+void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
+ // see ROOT_ACCESSOR macro in factory.h
+ Handle<Object> value(BitCast<Object**>(
+ &isolate()->heap()->roots_address()[index]));
+ cmp(with, value);
+}
+
+
void MacroAssembler::CmpObjectType(Register heap_object,
InstanceType type,
Register map) {
void SafeSet(Register dst, const Immediate& x);
void SafePush(const Immediate& x);
+ // Compare a register against a known root, e.g. undefined, null, true, ...
+ void CompareRoot(Register with, Heap::RootListIndex index);
+
// Compare object type for heap object.
// Incoming register is heap_object and outgoing register is map.
void CmpObjectType(Register heap_object, InstanceType type, Register map);
}
-// The stub returns zero for false, and a non-zero value for true.
+// The stub expects its argument on the stack and returns its result in tos_:
+// zero for false, and a non-zero value for true.
void ToBooleanStub::Generate(MacroAssembler* masm) {
- Label false_result, true_result, not_string;
+ Label patch;
+ const Register argument = rax;
const Register map = rdx;
- __ movq(rax, Operand(rsp, 1 * kPointerSize));
+ if (!types_.IsEmpty()) {
+ __ movq(argument, Operand(rsp, 1 * kPointerSize));
+ }
// undefined -> false
- __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
- __ j(equal, &false_result);
+ CheckOddball(masm, UNDEFINED, Heap::kUndefinedValueRootIndex, false, &patch);
// Boolean -> its value
- __ CompareRoot(rax, Heap::kFalseValueRootIndex);
- __ j(equal, &false_result);
- __ CompareRoot(rax, Heap::kTrueValueRootIndex);
- __ j(equal, &true_result);
-
- // Smis: 0 -> false, all other -> true
- __ Cmp(rax, Smi::FromInt(0));
- __ j(equal, &false_result);
- __ JumpIfSmi(rax, &true_result);
+ CheckOddball(masm, BOOLEAN, Heap::kFalseValueRootIndex, false, &patch);
+ CheckOddball(masm, BOOLEAN, Heap::kTrueValueRootIndex, true, &patch);
// 'null' -> false.
- __ CompareRoot(rax, Heap::kNullValueRootIndex);
- __ j(equal, &false_result, Label::kNear);
+ CheckOddball(masm, NULL_TYPE, Heap::kNullValueRootIndex, false, &patch);
+
+ if (types_.Contains(SMI)) {
+ // Smis: 0 -> false, all other -> true
+ Label not_smi;
+ __ JumpIfNotSmi(argument, &not_smi, Label::kNear);
+ // argument contains the correct return value already
+ if (!tos_.is(argument)) {
+ __ movq(tos_, argument);
+ }
+ __ ret(1 * kPointerSize);
+ __ bind(&not_smi);
+ } else if (types_.NeedsMap()) {
+ // If we need a map later and have a Smi -> patch.
+ __ JumpIfSmi(argument, &patch, Label::kNear);
+ }
- // Get the map of the heap object.
- __ movq(map, FieldOperand(rax, HeapObject::kMapOffset));
+ if (types_.NeedsMap()) {
+ __ movq(map, FieldOperand(argument, HeapObject::kMapOffset));
- // Undetectable -> false.
- __ testb(FieldOperand(map, Map::kBitFieldOffset),
- Immediate(1 << Map::kIsUndetectable));
- __ j(not_zero, &false_result, Label::kNear);
+ // Everything with a map could be undetectable, so check this now.
+ __ testb(FieldOperand(map, Map::kBitFieldOffset),
+ Immediate(1 << Map::kIsUndetectable));
+ // Undetectable -> false.
+ Label not_undetectable;
+ __ j(zero, &not_undetectable, Label::kNear);
+ __ Set(tos_, 0);
+ __ ret(1 * kPointerSize);
+ __ bind(&not_undetectable);
+ }
- // JavaScript object -> true.
- __ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
- __ j(above_equal, &true_result, Label::kNear);
+ if (types_.Contains(SPEC_OBJECT)) {
+ // spec object -> true.
+ Label not_js_object;
+ __ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
+ __ j(below, &not_js_object, Label::kNear);
+ __ Set(tos_, 1);
+ __ ret(1 * kPointerSize);
+ __ bind(&not_js_object);
+ } else if (types_.Contains(INTERNAL_OBJECT)) {
+ // We've seen a spec object for the first time -> patch.
+ __ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
+ __ j(above_equal, &patch, Label::kNear);
+ }
- // String value -> false iff empty.
- __ CmpInstanceType(map, FIRST_NONSTRING_TYPE);
- __ j(above_equal, &not_string, Label::kNear);
- __ cmpq(FieldOperand(rax, String::kLengthOffset), Immediate(0));
- __ j(zero, &false_result, Label::kNear);
- __ jmp(&true_result, Label::kNear);
+ if (types_.Contains(STRING)) {
+ // String value -> false iff empty.
+ Label not_string;
+ __ CmpInstanceType(map, FIRST_NONSTRING_TYPE);
+ __ j(above_equal, &not_string, Label::kNear);
+ __ movq(tos_, FieldOperand(argument, String::kLengthOffset));
+ __ ret(1 * kPointerSize); // the string length is OK as the return value
+ __ bind(&not_string);
+ } else if (types_.Contains(INTERNAL_OBJECT)) {
+ // We've seen a string for the first time -> patch
+ __ CmpInstanceType(map, FIRST_NONSTRING_TYPE);
+ __ j(below, &patch, Label::kNear);
+ }
- __ bind(&not_string);
- // HeapNumber -> false iff +0, -0, or NaN.
- // These three cases set the zero flag when compared to zero using ucomisd.
- __ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
- __ j(not_equal, &true_result, Label::kNear);
- __ xorps(xmm0, xmm0);
- __ ucomisd(xmm0, FieldOperand(rax, HeapNumber::kValueOffset));
- __ j(zero, &false_result, Label::kNear);
- // Fall through to |true_result|.
+ if (types_.Contains(HEAP_NUMBER)) {
+ // heap number -> false iff +0, -0, or NaN.
+ Label not_heap_number, false_result;
+ __ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
+ __ j(not_equal, &not_heap_number, Label::kNear);
+ __ xorps(xmm0, xmm0);
+ __ ucomisd(xmm0, FieldOperand(argument, HeapNumber::kValueOffset));
+ __ j(zero, &false_result, Label::kNear);
+ __ Set(tos_, 1);
+ __ ret(1 * kPointerSize);
+ __ bind(&false_result);
+ __ Set(tos_, 0);
+ __ ret(1 * kPointerSize);
+ __ bind(&not_heap_number);
+ } else if (types_.Contains(INTERNAL_OBJECT)) {
+ // We've seen a heap number for the first time -> patch
+ __ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
+ __ j(equal, &patch, Label::kNear);
+ }
- // Return 1/0 for true/false in tos_.
- __ bind(&true_result);
- __ Set(tos_, 1);
- __ ret(1 * kPointerSize);
- __ bind(&false_result);
- __ Set(tos_, 0);
- __ ret(1 * kPointerSize);
+ if (types_.Contains(INTERNAL_OBJECT)) {
+ // internal objects -> true
+ __ Set(tos_, 1);
+ __ ret(1 * kPointerSize);
+ }
+
+ if (!types_.IsAll()) {
+ __ bind(&patch);
+ GenerateTypeTransition(masm);
+ }
+}
+
+
+void ToBooleanStub::CheckOddball(MacroAssembler* masm,
+ Type type,
+ Heap::RootListIndex value,
+ bool result,
+ Label* patch) {
+ const Register argument = rax;
+ if (types_.Contains(type)) {
+ // If we see an expected oddball, return its ToBoolean value tos_.
+ Label different_value;
+ __ CompareRoot(argument, value);
+ __ j(not_equal, &different_value, Label::kNear);
+ __ Set(tos_, result ? 1 : 0);
+ __ ret(1 * kPointerSize);
+ __ bind(&different_value);
+ } else if (types_.Contains(INTERNAL_OBJECT)) {
+ // If we see an unexpected oddball and handle internal objects, we must
+ // patch because the code for internal objects doesn't handle it explicitly.
+ __ CompareRoot(argument, value);
+ __ j(equal, patch);
+ }
+}
+
+
+void ToBooleanStub::GenerateTypeTransition(MacroAssembler* masm) {
+ __ pop(rcx); // Get return address, operand is now on top of stack.
+ __ Push(Smi::FromInt(tos_.code()));
+ __ Push(Smi::FromInt(types_.ToByte()));
+ __ push(rcx); // Push return address.
+ // Patch the caller to an appropriate specialized stub and return the
+ // operation result to the caller of the stub.
+ __ TailCallExternalReference(
+ ExternalReference(IC_Utility(IC::kToBoolean_Patch), masm->isolate()),
+ 3,
+ 1);
}
Label* true_label = chunk_->GetAssemblyLabel(true_block);
Label* false_label = chunk_->GetAssemblyLabel(false_block);
- __ CompareRoot(reg, Heap::kUndefinedValueRootIndex);
- __ j(equal, false_label);
- __ CompareRoot(reg, Heap::kTrueValueRootIndex);
- __ j(equal, true_label);
- __ CompareRoot(reg, Heap::kFalseValueRootIndex);
- __ j(equal, false_label);
- __ Cmp(reg, Smi::FromInt(0));
- __ j(equal, false_label);
- __ JumpIfSmi(reg, true_label);
-
- // Test for double values. Plus/minus zero and NaN are false.
- Label call_stub;
- __ CompareRoot(FieldOperand(reg, HeapObject::kMapOffset),
- Heap::kHeapNumberMapRootIndex);
- __ j(not_equal, &call_stub, Label::kNear);
-
- // HeapNumber => false iff +0, -0, or NaN. These three cases set the
- // zero flag when compared to zero using ucomisd.
- __ xorps(xmm0, xmm0);
- __ ucomisd(xmm0, FieldOperand(reg, HeapNumber::kValueOffset));
- __ j(zero, false_label);
- __ jmp(true_label);
-
- // The conversion stub doesn't cause garbage collections so it's
- // safe to not record a safepoint after the call.
- __ bind(&call_stub);
- ToBooleanStub stub(rax);
- __ Pushad();
- __ push(reg);
- __ CallStub(&stub);
- __ testq(rax, rax);
- __ Popad();
- EmitBranch(true_block, false_block, not_zero);
+ ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types();
+ // Avoid deopts in the case where we've never executed this path before.
+ if (expected.IsEmpty()) expected = ToBooleanStub::all_types();
+
+ if (expected.Contains(ToBooleanStub::UNDEFINED)) {
+ // undefined -> false.
+ __ CompareRoot(reg, Heap::kUndefinedValueRootIndex);
+ __ j(equal, false_label);
+ } else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) {
+ // We've seen undefined for the first time -> deopt.
+ __ CompareRoot(reg, Heap::kUndefinedValueRootIndex);
+ DeoptimizeIf(equal, instr->environment());
+ }
+
+ if (expected.Contains(ToBooleanStub::BOOLEAN)) {
+ // true -> true.
+ __ CompareRoot(reg, Heap::kTrueValueRootIndex);
+ __ j(equal, true_label);
+ } else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) {
+ // We've seen a boolean for the first time -> deopt.
+ __ CompareRoot(reg, Heap::kTrueValueRootIndex);
+ DeoptimizeIf(equal, instr->environment());
+ }
+
+ if (expected.Contains(ToBooleanStub::BOOLEAN)) {
+ // false -> false.
+ __ CompareRoot(reg, Heap::kFalseValueRootIndex);
+ __ j(equal, false_label);
+ } else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) {
+ // We've seen a boolean for the first time -> deopt.
+ __ CompareRoot(reg, Heap::kFalseValueRootIndex);
+ DeoptimizeIf(equal, instr->environment());
+ }
+
+ if (expected.Contains(ToBooleanStub::NULL_TYPE)) {
+ // 'null' -> false.
+ __ CompareRoot(reg, Heap::kNullValueRootIndex);
+ __ j(equal, false_label);
+ } else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) {
+ // We've seen null for the first time -> deopt.
+ __ CompareRoot(reg, Heap::kNullValueRootIndex);
+ DeoptimizeIf(equal, instr->environment());
+ }
+
+ if (expected.Contains(ToBooleanStub::SMI)) {
+ // Smis: 0 -> false, all other -> true.
+ __ Cmp(reg, Smi::FromInt(0));
+ __ j(equal, false_label);
+ __ JumpIfSmi(reg, true_label);
+ } else if (expected.NeedsMap()) {
+ // If we need a map later and have a Smi -> deopt.
+ __ testb(reg, Immediate(kSmiTagMask));
+ DeoptimizeIf(zero, instr->environment());
+ }
+
+ const Register map = kScratchRegister;
+ if (expected.NeedsMap()) {
+ __ movq(map, FieldOperand(reg, HeapObject::kMapOffset));
+ // Everything with a map could be undetectable, so check this now.
+ __ testb(FieldOperand(map, Map::kBitFieldOffset),
+ Immediate(1 << Map::kIsUndetectable));
+ // Undetectable -> false.
+ __ j(not_zero, false_label);
+ }
+
+ if (expected.Contains(ToBooleanStub::SPEC_OBJECT)) {
+ // spec object -> true.
+ __ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
+ __ j(above_equal, true_label);
+ } else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) {
+ // We've seen a spec object for the first time -> deopt.
+ __ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
+ DeoptimizeIf(above_equal, instr->environment());
+ }
+
+ if (expected.Contains(ToBooleanStub::STRING)) {
+ // String value -> false iff empty.
+ Label not_string;
+ __ CmpInstanceType(map, FIRST_NONSTRING_TYPE);
+ __ j(above_equal, &not_string, Label::kNear);
+ __ cmpq(FieldOperand(reg, String::kLengthOffset), Immediate(0));
+ __ j(not_zero, true_label);
+ __ jmp(false_label);
+ __ bind(&not_string);
+ } else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) {
+ // We've seen a string for the first time -> deopt
+ __ CmpInstanceType(map, FIRST_NONSTRING_TYPE);
+ DeoptimizeIf(below, instr->environment());
+ }
+
+ if (expected.Contains(ToBooleanStub::HEAP_NUMBER)) {
+ // heap number -> false iff +0, -0, or NaN.
+ Label not_heap_number;
+ __ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
+ __ j(not_equal, &not_heap_number, Label::kNear);
+ __ xorps(xmm0, xmm0);
+ __ ucomisd(xmm0, FieldOperand(reg, HeapNumber::kValueOffset));
+ __ j(zero, false_label);
+ __ jmp(true_label);
+ __ bind(&not_heap_number);
+ } else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) {
+ // We've seen a heap number for the first time -> deopt.
+ __ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
+ DeoptimizeIf(equal, instr->environment());
+ }
+
+ if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) {
+ // internal objects -> true
+ __ jmp(true_label);
+ } else {
+ // We've seen something for the first time -> deopt.
+ DeoptimizeIf(no_condition, instr->environment());
+ }
}
}
}
: instr->SecondSuccessor();
return new LGoto(successor->block_id());
}
- return new LBranch(UseRegisterAtStart(v));
+ LInstruction* branch = new LBranch(UseRegister(v));
+ // When we handle all cases, we never deopt, so we don't need to assign the
+ // environment then.
+ bool all_cases_handled = instr->expected_input_types().IsAll();
+ return all_cases_handled ? branch : AssignEnvironment(branch);
}