From 549749d374f0c6ba1a4f3251e0b71aa6fd4bf68b Mon Sep 17 00:00:00 2001 From: mbrandy Date: Wed, 1 Jul 2015 06:34:57 -0700 Subject: [PATCH] PPC: Fix InstanceOfStub's inlined call site logic. This change makes the patching logic less prone to errors in the face of variable instruction mov sequences. R=dstence@us.ibm.com, michael_dawson@ca.ibm.com BUG= Review URL: https://codereview.chromium.org/1213383003 Cr-Commit-Position: refs/heads/master@{#29416} --- src/ppc/code-stubs-ppc.cc | 27 ++++++++++++--------------- src/ppc/lithium-codegen-ppc.cc | 38 +++++++++++++++++++++++--------------- src/ppc/lithium-codegen-ppc.h | 2 +- 3 files changed, 36 insertions(+), 31 deletions(-) diff --git a/src/ppc/code-stubs-ppc.cc b/src/ppc/code-stubs-ppc.cc index 848fc22..b852de9 100644 --- a/src/ppc/code-stubs-ppc.cc +++ b/src/ppc/code-stubs-ppc.cc @@ -1379,15 +1379,14 @@ void InstanceofStub::Generate(MacroAssembler* masm) { Register map = r6; // Map of the object. const Register function = r4; // Function (rhs). const Register prototype = r7; // Prototype of the function. - const Register inline_site = r9; + // The map_check_delta was stored in r8 + // The bool_load_delta was stored in r9 + // (See LCodeGen::DoDeferredLInstanceOfKnownGlobal). + const Register map_check_delta = r8; + const Register bool_load_delta = r9; + const Register inline_site = r10; const Register scratch = r5; Register scratch3 = no_reg; - - // delta = mov + tagged LoadP + cmp + bne - const int32_t kDeltaToLoadBoolResult = - (Assembler::kMovInstructions + Assembler::kTaggedLoadInstructions + 2) * - Assembler::kInstrSize; - Label slow, loop, is_instance, is_not_instance, not_js_object; if (!HasArgsInRegisters()) { @@ -1429,17 +1428,15 @@ void InstanceofStub::Generate(MacroAssembler* masm) { DCHECK(HasArgsInRegisters()); // Patch the (relocated) inlined map check. - // The offset was stored in r8 - // (See LCodeGen::DoDeferredLInstanceOfKnownGlobal). 
- const Register offset = r8; + const Register offset = map_check_delta; __ mflr(inline_site); __ sub(inline_site, inline_site, offset); - // Get the map location in r8 and patch it. + // Get the map location in offset and patch it. __ GetRelocatedValue(inline_site, offset, scratch); __ StoreP(map, FieldMemOperand(offset, Cell::kValueOffset), r0); - __ mr(r10, map); - __ RecordWriteField(offset, Cell::kValueOffset, r10, function, + __ mr(r11, map); + __ RecordWriteField(offset, Cell::kValueOffset, r11, function, kLRHasNotBeenSaved, kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK); } @@ -1474,7 +1471,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) { } else { // Patch the call site to return true. __ LoadRoot(r3, Heap::kTrueValueRootIndex); - __ addi(inline_site, inline_site, Operand(kDeltaToLoadBoolResult)); + __ add(inline_site, inline_site, bool_load_delta); // Get the boolean result location in scratch and patch it. __ SetRelocatedValue(inline_site, scratch, r3); @@ -1494,7 +1491,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) { } else { // Patch the call site to return false. __ LoadRoot(r3, Heap::kFalseValueRootIndex); - __ addi(inline_site, inline_site, Operand(kDeltaToLoadBoolResult)); + __ add(inline_site, inline_site, bool_load_delta); // Get the boolean result location in scratch and patch it. 
__ SetRelocatedValue(inline_site, scratch, r3); diff --git a/src/ppc/lithium-codegen-ppc.cc b/src/ppc/lithium-codegen-ppc.cc index dea936e..8aaf404 100644 --- a/src/ppc/lithium-codegen-ppc.cc +++ b/src/ppc/lithium-codegen-ppc.cc @@ -2813,14 +2813,17 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) { LInstanceOfKnownGlobal* instr) : LDeferredCode(codegen), instr_(instr) {} void Generate() override { - codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_); + codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_, + &load_bool_); } LInstruction* instr() override { return instr_; } Label* map_check() { return &map_check_; } + Label* load_bool() { return &load_bool_; } private: LInstanceOfKnownGlobal* instr_; Label map_check_; + Label load_bool_; }; DeferredInstanceOfKnownGlobal* deferred; @@ -2853,6 +2856,7 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) { __ LoadP(ip, FieldMemOperand(ip, Cell::kValueOffset)); __ cmp(map, ip); __ bc_short(ne, &cache_miss); + __ bind(deferred->load_bool()); // Label for calculating code patching. // We use Factory::the_hole_value() on purpose instead of loading from the // root array to force relocation to be able to later patch // with true or false. 
@@ -2886,7 +2890,8 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) { void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, - Label* map_check) { + Label* map_check, + Label* bool_load) { InstanceofStub::Flags flags = InstanceofStub::kNoFlags; flags = static_cast<InstanceofStub::Flags>(flags | InstanceofStub::kArgsInRegisters); @@ -2903,21 +2908,24 @@ void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); Handle<Code> code = stub.GetCode(); - // Include instructions below in delta: bitwise_mov32 + call - int delta = (masm_->InstructionsGeneratedSince(map_check) + 2) * - Instruction::kInstrSize + - masm_->CallSize(code); - // r8 is used to communicate the offset to the location of the map check. - if (is_int16(delta)) { - delta -= Instruction::kInstrSize; - __ li(r8, Operand(delta)); - } else { - __ bitwise_mov32(r8, delta); - } + // Include instructions below in delta: bitwise_mov32 + li + call + int additional_delta = 3 * Instruction::kInstrSize + masm_->CallSize(code); + // The labels must already be bound since the code has predictable size up + // to the call instruction. + DCHECK(map_check->is_bound()); + DCHECK(bool_load->is_bound()); + int map_check_delta = + masm_->InstructionsGeneratedSince(map_check) * Instruction::kInstrSize; + int bool_load_delta = + masm_->InstructionsGeneratedSince(bool_load) * Instruction::kInstrSize; + // r8 is the delta from our callee's lr to the location of the map check. + __ bitwise_mov32(r8, map_check_delta + additional_delta); + // r9 is the delta from map check to bool load. 
+ __ li(r9, Operand(map_check_delta - bool_load_delta)); CallCodeGeneric(code, RelocInfo::CODE_TARGET, instr, RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); - DCHECK(delta / Instruction::kInstrSize == - masm_->InstructionsGeneratedSince(map_check)); + DCHECK_EQ((map_check_delta + additional_delta) / Instruction::kInstrSize, + masm_->InstructionsGeneratedSince(map_check)); } LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment(); safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); diff --git a/src/ppc/lithium-codegen-ppc.h b/src/ppc/lithium-codegen-ppc.h index d41954f..392bbf5 100644 --- a/src/ppc/lithium-codegen-ppc.h +++ b/src/ppc/lithium-codegen-ppc.h @@ -112,7 +112,7 @@ class LCodeGen : public LCodeGenBase { void DoDeferredStringCharFromCode(LStringCharFromCode* instr); void DoDeferredAllocate(LAllocate* instr); void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, - Label* map_check); + Label* map_check, Label* bool_load); void DoDeferredInstanceMigration(LCheckMaps* instr, Register object); void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr, Register result, Register object, Register index); -- 2.7.4