Register map = r6; // Map of the object.
const Register function = r4; // Function (rhs).
const Register prototype = r7; // Prototype of the function.
- const Register inline_site = r9;
+ // The map_check_delta was stored in r8
+ // The bool_load_delta was stored in r9
+ // (See LCodeGen::DoDeferredLInstanceOfKnownGlobal).
+ const Register map_check_delta = r8;
+ const Register bool_load_delta = r9;
+ const Register inline_site = r10;
const Register scratch = r5;
Register scratch3 = no_reg;
-
- // delta = mov + tagged LoadP + cmp + bne
- const int32_t kDeltaToLoadBoolResult =
- (Assembler::kMovInstructions + Assembler::kTaggedLoadInstructions + 2) *
- Assembler::kInstrSize;
-
Label slow, loop, is_instance, is_not_instance, not_js_object;
if (!HasArgsInRegisters()) {
DCHECK(HasArgsInRegisters());
// Patch the (relocated) inlined map check.
- // The offset was stored in r8
- // (See LCodeGen::DoDeferredLInstanceOfKnownGlobal).
- const Register offset = r8;
+ const Register offset = map_check_delta;
__ mflr(inline_site);
__ sub(inline_site, inline_site, offset);
- // Get the map location in r8 and patch it.
+ // Get the map location in offset and patch it.
__ GetRelocatedValue(inline_site, offset, scratch);
__ StoreP(map, FieldMemOperand(offset, Cell::kValueOffset), r0);
- __ mr(r10, map);
- __ RecordWriteField(offset, Cell::kValueOffset, r10, function,
+ __ mr(r11, map);
+ __ RecordWriteField(offset, Cell::kValueOffset, r11, function,
kLRHasNotBeenSaved, kDontSaveFPRegs,
OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
}
} else {
// Patch the call site to return true.
__ LoadRoot(r3, Heap::kTrueValueRootIndex);
- __ addi(inline_site, inline_site, Operand(kDeltaToLoadBoolResult));
+ __ add(inline_site, inline_site, bool_load_delta);
// Get the boolean result location in scratch and patch it.
__ SetRelocatedValue(inline_site, scratch, r3);
} else {
// Patch the call site to return false.
__ LoadRoot(r3, Heap::kFalseValueRootIndex);
- __ addi(inline_site, inline_site, Operand(kDeltaToLoadBoolResult));
+ __ add(inline_site, inline_site, bool_load_delta);
// Get the boolean result location in scratch and patch it.
__ SetRelocatedValue(inline_site, scratch, r3);
LInstanceOfKnownGlobal* instr)
: LDeferredCode(codegen), instr_(instr) {}
void Generate() override {
- codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_);
+ codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_,
+ &load_bool_);
}
LInstruction* instr() override { return instr_; }
Label* map_check() { return &map_check_; }
+ Label* load_bool() { return &load_bool_; }
private:
LInstanceOfKnownGlobal* instr_;
Label map_check_;
+ Label load_bool_;
};
DeferredInstanceOfKnownGlobal* deferred;
__ LoadP(ip, FieldMemOperand(ip, Cell::kValueOffset));
__ cmp(map, ip);
__ bc_short(ne, &cache_miss);
+ __ bind(deferred->load_bool()); // Label for calculating code patching.
// We use Factory::the_hole_value() on purpose instead of loading from the
// root array to force relocation to be able to later patch
// with true or false.
void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
- Label* map_check) {
+ Label* map_check,
+ Label* bool_load) {
InstanceofStub::Flags flags = InstanceofStub::kNoFlags;
flags = static_cast<InstanceofStub::Flags>(flags |
InstanceofStub::kArgsInRegisters);
{
Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
Handle<Code> code = stub.GetCode();
- // Include instructions below in delta: bitwise_mov32 + call
- int delta = (masm_->InstructionsGeneratedSince(map_check) + 2) *
- Instruction::kInstrSize +
- masm_->CallSize(code);
- // r8 is used to communicate the offset to the location of the map check.
- if (is_int16(delta)) {
- delta -= Instruction::kInstrSize;
- __ li(r8, Operand(delta));
- } else {
- __ bitwise_mov32(r8, delta);
- }
+ // Include instructions below in delta: bitwise_mov32 + li + call
+ int additional_delta = 3 * Instruction::kInstrSize + masm_->CallSize(code);
+ // The labels must already be bound since the code has predictable size up
+ // to the call instruction.
+ DCHECK(map_check->is_bound());
+ DCHECK(bool_load->is_bound());
+ int map_check_delta =
+ masm_->InstructionsGeneratedSince(map_check) * Instruction::kInstrSize;
+ int bool_load_delta =
+ masm_->InstructionsGeneratedSince(bool_load) * Instruction::kInstrSize;
+ // r8 is the delta from our callee's lr to the location of the map check.
+ __ bitwise_mov32(r8, map_check_delta + additional_delta);
+ // r9 is the delta from map check to bool load.
+ __ li(r9, Operand(map_check_delta - bool_load_delta));
CallCodeGeneric(code, RelocInfo::CODE_TARGET, instr,
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
- DCHECK(delta / Instruction::kInstrSize ==
- masm_->InstructionsGeneratedSince(map_check));
+ DCHECK_EQ((map_check_delta + additional_delta) / Instruction::kInstrSize,
+ masm_->InstructionsGeneratedSince(map_check));
}
LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment();
safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
void DoDeferredAllocate(LAllocate* instr);
void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
- Label* map_check);
+ Label* map_check, Label* bool_load);
void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr, Register result,
Register object, Register index);