From 10e04142d149f7e38c24c70923d211b2b30eb8b3 Mon Sep 17 00:00:00 2001 From: "haitao.feng@intel.com" Date: Fri, 24 Jan 2014 01:45:53 +0000 Subject: [PATCH] Introduce Assembler::RelocInfoNone function for X64 R=verwaest@chromium.org Review URL: https://codereview.chromium.org/140553004 git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@18802 ce2b1a6d-e550-0410-aec6-3dcde31c8c00 --- src/x64/assembler-x64.cc | 2 +- src/x64/assembler-x64.h | 9 +++++++++ src/x64/code-stubs-x64.cc | 10 +++++----- src/x64/deoptimizer-x64.cc | 2 +- src/x64/full-codegen-x64.cc | 2 +- src/x64/lithium-codegen-x64.cc | 2 +- src/x64/macro-assembler-x64.cc | 28 +++++++++++++++------------- src/x64/macro-assembler-x64.h | 4 ++-- 8 files changed, 35 insertions(+), 24 deletions(-) diff --git a/src/x64/assembler-x64.cc b/src/x64/assembler-x64.cc index 4585fa7..e7c20bb 100644 --- a/src/x64/assembler-x64.cc +++ b/src/x64/assembler-x64.cc @@ -110,7 +110,7 @@ void RelocInfo::PatchCodeWithCall(Address target, int guard_bytes) { #endif // Patch the code. - patcher.masm()->movp(kScratchRegister, target, RelocInfo::NONE64); + patcher.masm()->movp(kScratchRegister, target, Assembler::RelocInfoNone()); patcher.masm()->call(kScratchRegister); // Check that the size of the code generated is as expected. diff --git a/src/x64/assembler-x64.h b/src/x64/assembler-x64.h index 39a6863..9692ec0 100644 --- a/src/x64/assembler-x64.h +++ b/src/x64/assembler-x64.h @@ -590,6 +590,15 @@ class Assembler : public AssemblerBase { set_target_address_at(instruction_payload, target); } + static inline RelocInfo::Mode RelocInfoNone() { + if (kPointerSize == kInt64Size) { + return RelocInfo::NONE64; + } else { + ASSERT(kPointerSize == kInt32Size); + return RelocInfo::NONE32; + } + } + inline Handle<Code> code_target_object_handle_at(Address pc); inline Address runtime_entry_at(Address pc); // Number of bytes taken up by the branch target in the code. 
diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc index 23b03ea..b8ef7de 100644 --- a/src/x64/code-stubs-x64.cc +++ b/src/x64/code-stubs-x64.cc @@ -2719,7 +2719,7 @@ void CEntryStub::Generate(MacroAssembler* masm) { // Do full GC and retry runtime call one final time. Failure* failure = Failure::InternalError(); - __ Move(rax, failure, RelocInfo::NONE64); + __ Move(rax, failure, Assembler::RelocInfoNone()); GenerateCore(masm, &throw_normal_exception, &throw_termination_exception, @@ -2740,7 +2740,7 @@ void CEntryStub::Generate(MacroAssembler* masm) { isolate); Label already_have_failure; JumpIfOOM(masm, rax, kScratchRegister, &already_have_failure); - __ Move(rax, Failure::OutOfMemoryException(0x1), RelocInfo::NONE64); + __ Move(rax, Failure::OutOfMemoryException(0x1), Assembler::RelocInfoNone()); __ bind(&already_have_failure); __ Store(pending_exception, rax); // Fall through to the next label. @@ -2770,7 +2770,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { // Scratch register is neither callee-save, nor an argument register on any // platform. It's free to use at this point. // Cannot use smi-register for loading yet. - __ Move(kScratchRegister, Smi::FromInt(marker), RelocInfo::NONE64); + __ Move(kScratchRegister, Smi::FromInt(marker), Assembler::RelocInfoNone()); __ push(kScratchRegister); // context slot __ push(kScratchRegister); // function slot // Save callee-saved registers (X64/Win64 calling conventions). @@ -2838,7 +2838,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) { ExternalReference pending_exception(Isolate::kPendingExceptionAddress, isolate); __ Store(pending_exception, rax); - __ Move(rax, Failure::Exception(), RelocInfo::NONE64); + __ Move(rax, Failure::Exception(), Assembler::RelocInfoNone()); __ jmp(&exit); // Invoke: Link this frame into the handler chain. 
@@ -4878,7 +4878,7 @@ void ProfileEntryHookStub::Generate(MacroAssembler* masm) { // Call the entry hook function. __ Move(rax, FUNCTION_ADDR(masm->isolate()->function_entry_hook()), - RelocInfo::NONE64); + Assembler::RelocInfoNone()); AllowExternalCallThatCantCauseGC scope(masm); diff --git a/src/x64/deoptimizer-x64.cc b/src/x64/deoptimizer-x64.cc index 1094dab..89a5f21 100644 --- a/src/x64/deoptimizer-x64.cc +++ b/src/x64/deoptimizer-x64.cc @@ -71,7 +71,7 @@ void Deoptimizer::PatchCodeForDeoptimization(Isolate* isolate, Code* code) { // LLazyBailout instructions with nops if necessary. CodePatcher patcher(call_address, Assembler::kCallSequenceLength); patcher.masm()->Call(GetDeoptimizationEntry(isolate, i, LAZY), - RelocInfo::NONE64); + Assembler::RelocInfoNone()); ASSERT(prev_call_address == NULL || call_address >= prev_call_address + patch_size()); ASSERT(call_address + patch_size() <= code->instruction_end()); diff --git a/src/x64/full-codegen-x64.cc b/src/x64/full-codegen-x64.cc index 64df73f..c4aedd4 100644 --- a/src/x64/full-codegen-x64.cc +++ b/src/x64/full-codegen-x64.cc @@ -3364,7 +3364,7 @@ void FullCodeGenerator::EmitDateField(CallRuntime* expr) { __ bind(&runtime); __ PrepareCallCFunction(2); __ movp(arg_reg_1, object); - __ Move(arg_reg_2, index, RelocInfo::NONE64); + __ Move(arg_reg_2, index, Assembler::RelocInfoNone()); __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2); __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); __ jmp(&done); diff --git a/src/x64/lithium-codegen-x64.cc b/src/x64/lithium-codegen-x64.cc index e99c9e3..1c3b380 100644 --- a/src/x64/lithium-codegen-x64.cc +++ b/src/x64/lithium-codegen-x64.cc @@ -1643,7 +1643,7 @@ void LCodeGen::DoDateField(LDateField* instr) { __ bind(&runtime); __ PrepareCallCFunction(2); __ movp(arg_reg_1, object); - __ Move(arg_reg_2, index, RelocInfo::NONE64); + __ Move(arg_reg_2, index, Assembler::RelocInfoNone()); __ 
CallCFunction(ExternalReference::get_date_field_function(isolate()), 2); __ bind(&done); } diff --git a/src/x64/macro-assembler-x64.cc b/src/x64/macro-assembler-x64.cc index a1f3f81..1fa2c8d 100644 --- a/src/x64/macro-assembler-x64.cc +++ b/src/x64/macro-assembler-x64.cc @@ -163,7 +163,7 @@ void MacroAssembler::PushAddress(ExternalReference source) { int64_t address = reinterpret_cast<int64_t>(source.address()); if (is_int32(address) && !Serializer::enabled()) { if (emit_debug_code()) { - Move(kScratchRegister, kZapValue, RelocInfo::NONE64); + Move(kScratchRegister, kZapValue, Assembler::RelocInfoNone()); } push(Immediate(static_cast<int32_t>(address))); return; } @@ -289,7 +289,7 @@ void MacroAssembler::InNewSpace(Register object, intptr_t new_space_start = reinterpret_cast<intptr_t>(isolate()->heap()->NewSpaceStart()); Move(kScratchRegister, reinterpret_cast<Address>
(-new_space_start), - RelocInfo::NONE64); + Assembler::RelocInfoNone()); if (scratch.is(object)) { addq(scratch, kScratchRegister); } else { @@ -340,8 +340,8 @@ void MacroAssembler::RecordWriteField( // Clobber clobbered input registers when running with the debug-code flag // turned on to provoke errors. if (emit_debug_code()) { - Move(value, kZapValue, RelocInfo::NONE64); - Move(dst, kZapValue, RelocInfo::NONE64); + Move(value, kZapValue, Assembler::RelocInfoNone()); + Move(dst, kZapValue, Assembler::RelocInfoNone()); } } @@ -374,8 +374,8 @@ void MacroAssembler::RecordWriteArray(Register object, // Clobber clobbered input registers when running with the debug-code flag // turned on to provoke errors. if (emit_debug_code()) { - Move(value, kZapValue, RelocInfo::NONE64); - Move(index, kZapValue, RelocInfo::NONE64); + Move(value, kZapValue, Assembler::RelocInfoNone()); + Move(index, kZapValue, Assembler::RelocInfoNone()); } } @@ -439,8 +439,8 @@ void MacroAssembler::RecordWrite(Register object, // Clobber clobbered registers when running with the debug-code flag // turned on to provoke errors. 
if (emit_debug_code()) { - Move(address, kZapValue, RelocInfo::NONE64); - Move(value, kZapValue, RelocInfo::NONE64); + Move(address, kZapValue, Assembler::RelocInfoNone()); + Move(value, kZapValue, Assembler::RelocInfoNone()); } } @@ -528,10 +528,11 @@ void MacroAssembler::Abort(BailoutReason reason) { #endif push(rax); - Move(kScratchRegister, reinterpret_cast<Smi*>(p0), RelocInfo::NONE64); + Move(kScratchRegister, reinterpret_cast<Smi*>(p0), + Assembler::RelocInfoNone()); push(kScratchRegister); Move(kScratchRegister, Smi::FromInt(static_cast<int>(p1 - p0)), - RelocInfo::NONE64); + Assembler::RelocInfoNone()); push(kScratchRegister); if (!has_frame_) { @@ -1043,7 +1044,8 @@ Register MacroAssembler::GetSmiConstant(Smi* source) { void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) { if (emit_debug_code()) { - Move(dst, Smi::FromInt(kSmiConstantRegisterValue), RelocInfo::NONE64); + Move(dst, Smi::FromInt(kSmiConstantRegisterValue), + Assembler::RelocInfoNone()); cmpq(dst, kSmiConstantRegister); Assert(equal, kUninitializedKSmiConstantRegister); } @@ -1083,7 +1085,7 @@ void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) { UNREACHABLE(); return; default: - Move(dst, source, RelocInfo::NONE64); + Move(dst, source, Assembler::RelocInfoNone()); return; } if (negative) { @@ -2623,7 +2625,7 @@ void MacroAssembler::Call(ExternalReference ext) { void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) { #ifdef DEBUG - int end_position = pc_offset() + CallSize(destination, rmode); + int end_position = pc_offset() + CallSize(destination); #endif Move(kScratchRegister, destination, rmode); call(kScratchRegister); diff --git a/src/x64/macro-assembler-x64.h b/src/x64/macro-assembler-x64.h index 08db9b9..048eff1 100644 --- a/src/x64/macro-assembler-x64.h +++ b/src/x64/macro-assembler-x64.h @@ -391,7 +391,7 @@ class MacroAssembler: public Assembler { void InitializeSmiConstantRegister() { Move(kSmiConstantRegister, Smi::FromInt(kSmiConstantRegisterValue), - 
RelocInfo::NONE64); + Assembler::RelocInfoNone()); } // Conversions between tagged smi values and non-tagged integer values. @@ -877,7 +877,7 @@ class MacroAssembler: public Assembler { TypeFeedbackId ast_id = TypeFeedbackId::None()); // The size of the code generated for different call instructions. - int CallSize(Address destination, RelocInfo::Mode rmode) { + int CallSize(Address destination) { return kCallSequenceLength; } int CallSize(ExternalReference ext); -- 2.7.4