}
-// Generate stub code for instanceof.
-// This code can patch a call site inlined cache of the instance of check,
-// which looks like this.
-//
-// 81 ff XX XX XX XX cmp edi, <the hole, patched to a map>
-// 75 0a jne <some near label>
-// b8 XX XX XX XX mov eax, <the hole, patched to either true or false>
-//
-// If call site patching is requested the stack will have the delta from the
-// return address to the cmp instruction just below the return address. This
-// also means that call site patching can only take place with arguments in
-// registers. TOS looks like this when call site patching is requested
-//
-// esp[0] : return address
-// esp[4] : delta from return address to cmp instruction
-//
-void InstanceofStub::Generate(MacroAssembler* masm) {
- // Call site inlining and patching implies arguments in registers.
- DCHECK(HasArgsInRegisters() || !HasCallSiteInlineCheck());
-
- // Fixed register usage throughout the stub.
- Register object = eax; // Object (lhs).
- Register map = ebx; // Map of the object.
- Register function = edx; // Function (rhs).
- Register prototype = edi; // Prototype of the function.
- Register scratch = ecx;
-
- // Constants describing the call site code to patch.
- static const int kDeltaToCmpImmediate = 2;
- static const int kDeltaToMov = 8;
- static const int kDeltaToMovImmediate = 9;
- static const int8_t kCmpEdiOperandByte1 = bit_cast<int8_t, uint8_t>(0x3b);
- static const int8_t kCmpEdiOperandByte2 = bit_cast<int8_t, uint8_t>(0x3d);
- static const int8_t kMovEaxImmediateByte = bit_cast<int8_t, uint8_t>(0xb8);
-
- DCHECK_EQ(object.code(), InstanceofStub::left().code());
- DCHECK_EQ(function.code(), InstanceofStub::right().code());
-
- // Get the object and function - they are always both needed.
- Label slow, not_js_object;
- if (!HasArgsInRegisters()) {
- __ mov(object, Operand(esp, 2 * kPointerSize));
- __ mov(function, Operand(esp, 1 * kPointerSize));
- }
+void InstanceOfStub::Generate(MacroAssembler* masm) {
+ Register const object = edx; // Object (lhs).
+ Register const function = eax; // Function (rhs).
+ Register const object_map = ecx; // Map of {object}.
+ Register const function_map = ebx; // Map of {function}.
+ Register const function_prototype = function_map; // Prototype of {function}.
+ Register const scratch = edi;
+
+ DCHECK(object.is(InstanceOfDescriptor::LeftRegister()));
+ DCHECK(function.is(InstanceOfDescriptor::RightRegister()));
+
+ // Check if {object} is a smi.
+ Label object_is_smi;
+ __ JumpIfSmi(object, &object_is_smi, Label::kNear);
+
+ // Lookup the {function} and the {object} map in the global instanceof cache.
+ // Note: This is safe because we clear the global instanceof cache whenever
+ // we change the prototype of any object.
+ Label fast_case, slow_case;
+ __ mov(object_map, FieldOperand(object, HeapObject::kMapOffset));
+ __ CompareRoot(function, scratch, Heap::kInstanceofCacheFunctionRootIndex);
+ __ j(not_equal, &fast_case, Label::kNear);
+ __ CompareRoot(object_map, scratch, Heap::kInstanceofCacheMapRootIndex);
+ __ j(not_equal, &fast_case, Label::kNear);
+ __ LoadRoot(eax, Heap::kInstanceofCacheAnswerRootIndex);
+ __ ret(0);
- // Check that the left hand is a JS object.
- __ JumpIfSmi(object, &not_js_object);
- __ IsObjectJSObjectType(object, map, scratch, &not_js_object);
-
- // If there is a call site cache don't look in the global cache, but do the
- // real lookup and update the call site cache.
- if (!HasCallSiteInlineCheck() && !ReturnTrueFalseObject()) {
- // Look up the function and the map in the instanceof cache.
- Label miss;
- __ CompareRoot(function, scratch, Heap::kInstanceofCacheFunctionRootIndex);
- __ j(not_equal, &miss, Label::kNear);
- __ CompareRoot(map, scratch, Heap::kInstanceofCacheMapRootIndex);
- __ j(not_equal, &miss, Label::kNear);
- __ LoadRoot(eax, Heap::kInstanceofCacheAnswerRootIndex);
- __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
- __ bind(&miss);
- }
+ // If {object} is a smi we can safely return false if {function} is a JS
+ // function, otherwise we have to miss to the runtime and throw an exception.
+ __ bind(&object_is_smi);
+ __ JumpIfSmi(function, &slow_case);
+ __ CmpObjectType(function, JS_FUNCTION_TYPE, function_map);
+ __ j(not_equal, &slow_case);
+ __ LoadRoot(eax, Heap::kFalseValueRootIndex);
+ __ ret(0);
- // Get the prototype of the function.
- __ TryGetFunctionPrototype(function, prototype, scratch, &slow, true);
+ // Fast-case: The {function} must be a valid JSFunction.
+ __ bind(&fast_case);
+ __ JumpIfSmi(function, &slow_case);
+ __ CmpObjectType(function, JS_FUNCTION_TYPE, function_map);
+ __ j(not_equal, &slow_case);
- // Check that the function prototype is a JS object.
- __ JumpIfSmi(prototype, &slow);
- __ IsObjectJSObjectType(prototype, scratch, scratch, &slow);
+ // Ensure that {function} has an instance prototype.
+ __ test_b(FieldOperand(function_map, Map::kBitFieldOffset),
+ static_cast<uint8_t>(1 << Map::kHasNonInstancePrototype));
+ __ j(not_zero, &slow_case);
- // Update the global instanceof or call site inlined cache with the current
- // map and function. The cached answer will be set when it is known below.
- if (!HasCallSiteInlineCheck()) {
- __ StoreRoot(map, scratch, Heap::kInstanceofCacheMapRootIndex);
- __ StoreRoot(function, scratch, Heap::kInstanceofCacheFunctionRootIndex);
- } else {
- // The constants for the code patching are based on no push instructions
- // at the call site.
- DCHECK(HasArgsInRegisters());
- // Get return address and delta to inlined map check.
- __ mov(scratch, Operand(esp, 0 * kPointerSize));
- __ sub(scratch, Operand(esp, 1 * kPointerSize));
- if (FLAG_debug_code) {
- __ cmpb(Operand(scratch, 0), kCmpEdiOperandByte1);
- __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCmp1);
- __ cmpb(Operand(scratch, 1), kCmpEdiOperandByte2);
- __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCmp2);
- }
- __ mov(scratch, Operand(scratch, kDeltaToCmpImmediate));
- __ mov(Operand(scratch, 0), map);
- __ push(map);
- // Scratch points at the cell payload. Calculate the start of the object.
- __ sub(scratch, Immediate(Cell::kValueOffset - 1));
- __ RecordWriteField(scratch, Cell::kValueOffset, map, function,
- kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
- __ pop(map);
- }
+ // Ensure that {function} is not bound.
+ Register const shared_info = scratch;
+ __ mov(shared_info,
+ FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
+ __ BooleanBitTest(shared_info, SharedFunctionInfo::kCompilerHintsOffset,
+ SharedFunctionInfo::kBoundFunction);
+ __ j(not_zero, &slow_case);
- // Loop through the prototype chain of the object looking for the function
- // prototype.
- __ mov(scratch, FieldOperand(map, Map::kPrototypeOffset));
- Label loop, is_instance, is_not_instance;
+ // Get the "prototype" (or initial map) of the {function}.
+ __ mov(function_prototype,
+ FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
+ __ AssertNotSmi(function_prototype);
+
+ // Resolve the prototype if the {function} has an initial map. Afterwards the
+ // {function_prototype} will be either the JSReceiver prototype object or the
+ // hole value, which means that no instances of the {function} were created so
+ // far and hence we should return false.
+ Label function_prototype_valid;
+ Register const function_prototype_map = scratch;
+ __ CmpObjectType(function_prototype, MAP_TYPE, function_prototype_map);
+ __ j(not_equal, &function_prototype_valid, Label::kNear);
+ __ mov(function_prototype,
+ FieldOperand(function_prototype, Map::kPrototypeOffset));
+ __ bind(&function_prototype_valid);
+ __ AssertNotSmi(function_prototype);
+
+ // Update the global instanceof cache with the current {object} map and
+ // {function}. The cached answer will be set when it is known below.
+ __ StoreRoot(function, scratch, Heap::kInstanceofCacheFunctionRootIndex);
+ __ StoreRoot(object_map, scratch, Heap::kInstanceofCacheMapRootIndex);
+
+ // Loop through the prototype chain looking for the {function} prototype.
+ // Assume true, and change to false if not found.
+ Register const object_prototype = object_map;
+ Label done, loop;
+ __ mov(eax, isolate()->factory()->true_value());
__ bind(&loop);
- __ cmp(scratch, prototype);
- __ j(equal, &is_instance, Label::kNear);
- Factory* factory = isolate()->factory();
- __ cmp(scratch, Immediate(factory->null_value()));
- __ j(equal, &is_not_instance, Label::kNear);
- __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
- __ mov(scratch, FieldOperand(scratch, Map::kPrototypeOffset));
- __ jmp(&loop);
-
- __ bind(&is_instance);
- if (!HasCallSiteInlineCheck()) {
- __ mov(eax, Immediate(0));
- __ StoreRoot(eax, scratch, Heap::kInstanceofCacheAnswerRootIndex);
- if (ReturnTrueFalseObject()) {
- __ mov(eax, factory->true_value());
- }
- } else {
- // Get return address and delta to inlined map check.
- __ mov(eax, factory->true_value());
- __ mov(scratch, Operand(esp, 0 * kPointerSize));
- __ sub(scratch, Operand(esp, 1 * kPointerSize));
- if (FLAG_debug_code) {
- __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
- __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
- }
- __ mov(Operand(scratch, kDeltaToMovImmediate), eax);
- if (!ReturnTrueFalseObject()) {
- __ Move(eax, Immediate(0));
- }
- }
- __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
-
- __ bind(&is_not_instance);
- if (!HasCallSiteInlineCheck()) {
- __ mov(eax, Immediate(Smi::FromInt(1)));
- __ StoreRoot(eax, scratch, Heap::kInstanceofCacheAnswerRootIndex);
- if (ReturnTrueFalseObject()) {
- __ mov(eax, factory->false_value());
- }
- } else {
- // Get return address and delta to inlined map check.
- __ mov(eax, factory->false_value());
- __ mov(scratch, Operand(esp, 0 * kPointerSize));
- __ sub(scratch, Operand(esp, 1 * kPointerSize));
- if (FLAG_debug_code) {
- __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
- __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
- }
- __ mov(Operand(scratch, kDeltaToMovImmediate), eax);
- if (!ReturnTrueFalseObject()) {
- __ Move(eax, Immediate(Smi::FromInt(1)));
- }
- }
- __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
-
- Label object_not_null, object_not_null_or_smi;
- __ bind(&not_js_object);
- // Before null, smi and string value checks, check that the rhs is a function
- // as for a non-function rhs an exception needs to be thrown.
- __ JumpIfSmi(function, &slow, Label::kNear);
- __ CmpObjectType(function, JS_FUNCTION_TYPE, scratch);
- __ j(not_equal, &slow, Label::kNear);
-
- // Null is not instance of anything.
- __ cmp(object, factory->null_value());
- __ j(not_equal, &object_not_null, Label::kNear);
- if (ReturnTrueFalseObject()) {
- __ mov(eax, factory->false_value());
- } else {
- __ Move(eax, Immediate(Smi::FromInt(1)));
- }
- __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
-
- __ bind(&object_not_null);
- // Smi values is not instance of anything.
- __ JumpIfNotSmi(object, &object_not_null_or_smi, Label::kNear);
- if (ReturnTrueFalseObject()) {
- __ mov(eax, factory->false_value());
- } else {
- __ Move(eax, Immediate(Smi::FromInt(1)));
- }
- __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
-
- __ bind(&object_not_null_or_smi);
- // String values is not instance of anything.
- Condition is_string = masm->IsObjectStringType(object, scratch, scratch);
- __ j(NegateCondition(is_string), &slow, Label::kNear);
- if (ReturnTrueFalseObject()) {
- __ mov(eax, factory->false_value());
- } else {
- __ Move(eax, Immediate(Smi::FromInt(1)));
- }
- __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
+ __ mov(object_prototype, FieldOperand(object_map, Map::kPrototypeOffset));
+ __ cmp(object_prototype, function_prototype);
+ __ j(equal, &done, Label::kNear);
+ __ cmp(object_prototype, isolate()->factory()->null_value());
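+ // Note: the mov below does not modify the flags set by the cmp above, so
+ // the not_equal branch still tests the comparison against null.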
+ __ mov(object_map, FieldOperand(object_prototype, HeapObject::kMapOffset));
+ __ j(not_equal, &loop);
+ __ mov(eax, isolate()->factory()->false_value());
+ __ bind(&done);
+ __ StoreRoot(eax, scratch, Heap::kInstanceofCacheAnswerRootIndex);
+ __ ret(0);
- // Slow-case: Go through the JavaScript implementation.
- __ bind(&slow);
- if (!ReturnTrueFalseObject()) {
- // Tail call the builtin which returns 0 or 1.
- if (HasArgsInRegisters()) {
- // Push arguments below return address.
- __ pop(scratch);
- __ push(object);
- __ push(function);
- __ push(scratch);
- }
- __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
- } else {
- // Call the builtin and convert 0/1 to true/false.
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
- __ push(object);
- __ push(function);
- __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION);
- }
- Label true_value, done;
- __ test(eax, eax);
- __ j(zero, &true_value, Label::kNear);
- __ mov(eax, factory->false_value());
- __ jmp(&done, Label::kNear);
- __ bind(&true_value);
- __ mov(eax, factory->true_value());
- __ bind(&done);
- __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
- }
+ // Slow-case: Call the runtime function.
+ __ bind(&slow_case);
+ __ pop(scratch); // Pop return address.
+ __ push(object); // Push {object}.
+ __ push(function); // Push {function}.
+ __ push(scratch); // Push return address.
+ __ TailCallRuntime(Runtime::kInstanceOf, 2, 1);
}
}
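
For orientation, the new stub boils down to the following algorithm. This is a
plain C++ sketch with hypothetical stand-in types (Object, Map, Function and
InstanceofCache are illustrative, not V8's real classes); the smi,
non-JSFunction and bound-function cases that fall through to the runtime are
omitted.

    // Hypothetical stand-ins for the tagged heap objects the stub works on.
    struct Map;
    struct Object { Map* map; };
    struct Map { Object* prototype; };
    struct Function : Object {
      Object* instance_prototype;  // the resolved "prototype" property
    };

    // Single-entry cache, mirroring the kInstanceofCache{Function,Map,Answer}
    // roots that the stub reads and writes.
    struct InstanceofCache {
      Function* function = nullptr;
      Map* object_map = nullptr;
      bool answer = false;
    };

    bool InstanceOf(Object* object, Function* function,
                    InstanceofCache& cache) {
      Map* object_map = object->map;
      // Fast path: the last (function, receiver map) pair is cached together
      // with its answer; the cache is flushed whenever a prototype changes.
      if (cache.function == function && cache.object_map == object_map) {
        return cache.answer;
      }
      // Walk the receiver's prototype chain until a match or null.
      bool answer = false;
      for (Object* p = object_map->prototype; p != nullptr;
           p = p->map->prototype) {
        if (p == function->instance_prototype) {
          answer = true;
          break;
        }
      }
      cache.function = function;  // fill the cache for the next query
      cache.object_map = object_map;
      cache.answer = answer;
      return answer;
    }
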
+template <class InstrType>
+void LCodeGen::EmitTrueBranch(InstrType instr, Condition cc) {
+ int true_block = instr->TrueDestination(chunk_);
+ if (cc == no_condition) {
+ __ jmp(chunk_->GetAssemblyLabel(true_block));
+ } else {
+ __ j(cc, chunk_->GetAssemblyLabel(true_block));
+ }
+}
+
+
template<class InstrType>
void LCodeGen::EmitFalseBranch(InstrType instr, Condition cc) {
int false_block = instr->FalseDestination(chunk_);
if (cc == no_condition) {
  __ jmp(chunk_->GetAssemblyLabel(false_block));
} else {
  __ j(cc, chunk_->GetAssemblyLabel(false_block));
}
}
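
EmitTrueBranch is the counterpart of the existing EmitFalseBranch helper: a
*AndBranch handler can exit to the matching successor block as soon as a
condition is decided. A hypothetical handler (not part of this patch) would
use the pair like this:

    void LCodeGen::DoCompareFooAndBranch(LCompareFooAndBranch* instr) {
      __ cmp(ToRegister(instr->left()), ToRegister(instr->right()));
      EmitTrueBranch(instr, equal);          // taken on equality
      EmitFalseBranch(instr, no_condition);  // covers the remaining case
    }
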
void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
- // Object and function are in fixed registers defined by the stub.
DCHECK(ToRegister(instr->context()).is(esi));
- InstanceofStub stub(isolate(), InstanceofStub::kArgsInRegisters);
+ DCHECK(ToRegister(instr->left()).is(InstanceOfDescriptor::LeftRegister()));
+ DCHECK(ToRegister(instr->right()).is(InstanceOfDescriptor::RightRegister()));
+ DCHECK(ToRegister(instr->result()).is(eax));
+ InstanceOfStub stub(isolate());
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
-
- Label true_value, done;
- __ test(eax, Operand(eax));
- __ j(zero, &true_value, Label::kNear);
- __ mov(ToRegister(instr->result()), factory()->false_value());
- __ jmp(&done, Label::kNear);
- __ bind(&true_value);
- __ mov(ToRegister(instr->result()), factory()->true_value());
- __ bind(&done);
-}
-
-
-void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
- class DeferredInstanceOfKnownGlobal final : public LDeferredCode {
- public:
- DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
- LInstanceOfKnownGlobal* instr,
- const X87Stack& x87_stack)
- : LDeferredCode(codegen, x87_stack), instr_(instr) { }
- void Generate() override {
- codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_);
- }
- LInstruction* instr() override { return instr_; }
- Label* map_check() { return &map_check_; }
- private:
- LInstanceOfKnownGlobal* instr_;
- Label map_check_;
- };
-
- DeferredInstanceOfKnownGlobal* deferred;
- deferred = new(zone()) DeferredInstanceOfKnownGlobal(this, instr, x87_stack_);
-
- Label done, false_result;
- Register object = ToRegister(instr->value());
- Register temp = ToRegister(instr->temp());
-
- // A Smi is not an instance of anything.
- __ JumpIfSmi(object, &false_result, Label::kNear);
-
- // This is the inlined call site instanceof cache. The two occurences of the
- // hole value will be patched to the last map/result pair generated by the
- // instanceof stub.
- Label cache_miss;
- Register map = ToRegister(instr->temp());
- __ mov(map, FieldOperand(object, HeapObject::kMapOffset));
- __ bind(deferred->map_check()); // Label for calculating code patching.
- Handle<Cell> cache_cell = factory()->NewCell(factory()->the_hole_value());
- __ cmp(map, Operand::ForCell(cache_cell)); // Patched to cached map.
- __ j(not_equal, &cache_miss, Label::kNear);
- __ mov(eax, factory()->the_hole_value()); // Patched to either true or false.
- __ jmp(&done, Label::kNear);
-
- // The inlined call site cache did not match. Check for null and string
- // before calling the deferred code.
- __ bind(&cache_miss);
- // Null is not an instance of anything.
- __ cmp(object, factory()->null_value());
- __ j(equal, &false_result, Label::kNear);
-
- // String values are not instances of anything.
- Condition is_string = masm_->IsObjectStringType(object, temp, temp);
- __ j(is_string, &false_result, Label::kNear);
-
- // Go to the deferred code.
- __ jmp(deferred->entry());
-
- __ bind(&false_result);
- __ mov(ToRegister(instr->result()), factory()->false_value());
-
- // Here result has either true or false. Deferred code also produces true or
- // false object.
- __ bind(deferred->exit());
- __ bind(&done);
}

-void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
- Label* map_check) {
- PushSafepointRegistersScope scope(this);
+void LCodeGen::DoHasInPrototypeChainAndBranch(
+ LHasInPrototypeChainAndBranch* instr) {
+ Register const object = ToRegister(instr->object());
+ Register const object_map = ToRegister(instr->scratch());
+ Register const object_prototype = object_map;
+ Register const prototype = ToRegister(instr->prototype());
- InstanceofStub::Flags flags = InstanceofStub::kNoFlags;
- flags = static_cast<InstanceofStub::Flags>(
- flags | InstanceofStub::kArgsInRegisters);
- flags = static_cast<InstanceofStub::Flags>(
- flags | InstanceofStub::kCallSiteInlineCheck);
- flags = static_cast<InstanceofStub::Flags>(
- flags | InstanceofStub::kReturnTrueFalseObject);
- InstanceofStub stub(isolate(), flags);
-
- // Get the temp register reserved by the instruction. This needs to be a
- // register which is pushed last by PushSafepointRegisters as top of the
- // stack is used to pass the offset to the location of the map check to
- // the stub.
- Register temp = ToRegister(instr->temp());
- DCHECK(MacroAssembler::SafepointRegisterStackIndex(temp) == 0);
- __ LoadHeapObject(InstanceofStub::right(), instr->function());
- static const int kAdditionalDelta = 13;
- int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta;
- __ mov(temp, Immediate(delta));
- __ StoreToSafepointRegisterSlot(temp, temp);
- CallCodeGeneric(stub.GetCode(),
- RelocInfo::CODE_TARGET,
- instr,
- RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
- // Get the deoptimization index of the LLazyBailout-environment that
- // corresponds to this instruction.
- LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment();
- safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
+ // The {object} must be a spec object. It's sufficient to know that {object}
+ // is not a smi, since all other non-spec objects have {null} prototypes and
+ // will be ruled out below.
+ if (instr->hydrogen()->ObjectNeedsSmiCheck()) {
+ __ test(object, Immediate(kSmiTagMask));
+ EmitFalseBranch(instr, zero);
+ }
- // Put the result value into the eax slot and restore all registers.
- __ StoreToSafepointRegisterSlot(eax, eax);
+ // Loop through the {object}'s prototype chain looking for the {prototype}.
+ __ mov(object_map, FieldOperand(object, HeapObject::kMapOffset));
+ Label loop;
+ __ bind(&loop);
+ __ mov(object_prototype, FieldOperand(object_map, Map::kPrototypeOffset));
+ __ cmp(object_prototype, prototype);
+ EmitTrueBranch(instr, equal);
+ __ cmp(object_prototype, factory()->null_value());
+ EmitFalseBranch(instr, equal);
+ __ mov(object_map, FieldOperand(object_prototype, HeapObject::kMapOffset));
+ __ jmp(&loop);
}
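
Reusing the stand-in types from the earlier sketch, the branch version
implements the same chain walk as a pure predicate; the comments mark where
the generated code takes its early exits:

    bool HasInPrototypeChain(Object* object, Object* prototype) {
      for (Object* p = object->map->prototype; p != nullptr;
           p = p->map->prototype) {
        if (p == prototype) return true;  // EmitTrueBranch(instr, equal)
      }
      return false;  // EmitFalseBranch(instr, equal) after hitting null
    }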