From: yangguo@chromium.org
Date: Fri, 30 Aug 2013 11:24:58 +0000 (+0000)
Subject: Turn interrupt and stack check into builtins.
X-Git-Tag: upstream/4.7.83~12734
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=ecbfcd7e0630bb460382e98d0d5b23798e45f609;p=platform%2Fupstream%2Fv8.git

Turn interrupt and stack check into builtins.
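
The StackCheckStub and InterruptStub platform code stubs did nothing but
tail-call into the runtime, so they are removed in favor of two builtins
that do the same; call sites now call the builtin code object directly
instead of materializing a stub. A sketch of the pattern, using
V8-internal types (taken from the hunks below, not a standalone example):

  // Before: a per-use PlatformCodeStub, looked up or compiled on demand.
  //   StackCheckStub stub;
  //   __ CallStub(&stub);
  // After: a builtin, generated once per isolate and always available.
  //   __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);

  // The builtin bodies are plain tail calls into the runtime:
  void Builtins::Generate_InterruptCheck(MacroAssembler* masm) {
    masm->TailCallRuntime(Runtime::kInterrupt, 0, 1);
  }

  void Builtins::Generate_StackCheck(MacroAssembler* masm) {
    masm->TailCallRuntime(Runtime::kStackGuard, 0, 1);
  }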

R=jkummerow@chromium.org
BUG=

Review URL: https://codereview.chromium.org/23480013

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@16444 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
---

diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc
index 9a4d6e5..310cf3e 100644
--- a/src/arm/code-stubs-arm.cc
+++ b/src/arm/code-stubs-arm.cc
@@ -2491,16 +2491,6 @@ Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() {
 }
 
 
-void StackCheckStub::Generate(MacroAssembler* masm) {
-  __ TailCallRuntime(Runtime::kStackGuard, 0, 1);
-}
-
-
-void InterruptStub::Generate(MacroAssembler* masm) {
-  __ TailCallRuntime(Runtime::kInterrupt, 0, 1);
-}
-
-
 void MathPowStub::Generate(MacroAssembler* masm) {
   const Register base = r1;
   const Register exponent = r2;
diff --git a/src/arm/deoptimizer-arm.cc b/src/arm/deoptimizer-arm.cc
index b2b19ce..e49e5bc 100644
--- a/src/arm/deoptimizer-arm.cc
+++ b/src/arm/deoptimizer-arm.cc
@@ -161,14 +161,13 @@ Deoptimizer::InterruptPatchState Deoptimizer::GetInterruptPatchState(
     return PATCHED_FOR_OSR;
   } else {
     // Get the interrupt stub code object to match against from cache.
-    Code* interrupt_code = NULL;
-    InterruptStub stub;
-    if (!stub.FindCodeInCache(&interrupt_code, isolate)) UNREACHABLE();
+    Code* interrupt_builtin =
+        isolate->builtins()->builtin(Builtins::kInterruptCheck);
     ASSERT(Assembler::IsLdrPcImmediateOffset(
         Assembler::instr_at(pc_after - 2 * kInstrSize)));
     ASSERT_EQ(kBranchBeforeInterrupt,
               Memory::int32_at(pc_after - 3 * kInstrSize));
-    ASSERT(reinterpret_cast<uint32_t>(interrupt_code->entry()) ==
+    ASSERT(reinterpret_cast<uint32_t>(interrupt_builtin->entry()) ==
            Memory::uint32_at(interrupt_address_pointer));
     return NOT_PATCHED;
   }
diff --git a/src/arm/full-codegen-arm.cc b/src/arm/full-codegen-arm.cc
index 3400248..2a0d102 100644
--- a/src/arm/full-codegen-arm.cc
+++ b/src/arm/full-codegen-arm.cc
@@ -296,8 +296,7 @@ void FullCodeGenerator::Generate() {
     __ cmp(sp, Operand(ip));
     __ b(hs, &ok);
     PredictableCodeSizeScope predictable(masm_, 2 * Assembler::kInstrSize);
-    StackCheckStub stub;
-    __ CallStub(&stub);
+    __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
     __ bind(&ok);
   }
 
@@ -366,8 +365,7 @@ void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
   }
   EmitProfilingCounterDecrement(weight);
   __ b(pl, &ok);
-  InterruptStub stub;
-  __ CallStub(&stub);
+  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
 
   // Record a mapping of this PC offset to the OSR id.  This is used to find
   // the AST id from the unoptimized code in order to use it as a key into
@@ -416,8 +414,8 @@ void FullCodeGenerator::EmitReturnSequence() {
       __ push(r2);
       __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
     } else {
-      InterruptStub stub;
-      __ CallStub(&stub);
+      __ Call(isolate()->builtins()->InterruptCheck(),
+              RelocInfo::CODE_TARGET);
     }
     __ pop(r0);
     EmitProfilingCounterReset();
diff --git a/src/arm/lithium-codegen-arm.cc b/src/arm/lithium-codegen-arm.cc
index 9cb92e8..ae24210 100644
--- a/src/arm/lithium-codegen-arm.cc
+++ b/src/arm/lithium-codegen-arm.cc
@@ -5643,9 +5643,10 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
     __ LoadRoot(ip, Heap::kStackLimitRootIndex);
     __ cmp(sp, Operand(ip));
     __ b(hs, &done);
-    StackCheckStub stub;
     PredictableCodeSizeScope predictable(masm_, 2 * Assembler::kInstrSize);
-    CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
+    CallCode(isolate()->builtins()->StackCheck(),
+             RelocInfo::CODE_TARGET,
+             instr);
     EnsureSpaceForLazyDeopt();
     last_lazy_deopt_pc_ = masm()->pc_offset();
     __ bind(&done);
diff --git a/src/builtins.cc b/src/builtins.cc
index 1bc0a72..f1ee0a4 100644
--- a/src/builtins.cc
+++ b/src/builtins.cc
@@ -1813,6 +1813,16 @@ const char* Builtins::Lookup(byte* pc) {
 }
 
 
+void Builtins::Generate_InterruptCheck(MacroAssembler* masm) {
+  masm->TailCallRuntime(Runtime::kInterrupt, 0, 1);
+}
+
+
+void Builtins::Generate_StackCheck(MacroAssembler* masm) {
+  masm->TailCallRuntime(Runtime::kStackGuard, 0, 1);
+}
+
+
 #define DEFINE_BUILTIN_ACCESSOR_C(name, ignore)          \
 Handle<Code> Builtins::name() {                          \
   Code** code_address =                                  \
diff --git a/src/builtins.h b/src/builtins.h
index 11494c6..a7c774a 100644
--- a/src/builtins.h
+++ b/src/builtins.h
@@ -211,6 +211,10 @@ enum BuiltinExtraArguments {
                                                           \
   V(OnStackReplacement,           BUILTIN, UNINITIALIZED, \
     Code::kNoExtraICState)                                \
+  V(InterruptCheck,               BUILTIN, UNINITIALIZED, \
+    Code::kNoExtraICState)                                \
+  V(StackCheck,                   BUILTIN, UNINITIALIZED, \
+    Code::kNoExtraICState)                                \
   CODE_AGE_LIST_WITH_ARG(DECLARE_CODE_AGE_BUILTIN, V)
 
 #ifdef ENABLE_DEBUGGER_SUPPORT
@@ -395,6 +399,9 @@ class Builtins {
   static void Generate_StringConstructCode(MacroAssembler* masm);
   static void Generate_OnStackReplacement(MacroAssembler* masm);
 
+  static void Generate_InterruptCheck(MacroAssembler* masm);
+  static void Generate_StackCheck(MacroAssembler* masm);
+
 #define DECLARE_CODE_AGE_BUILTIN_GENERATOR(C)              \
   static void Generate_Make##C##CodeYoungAgainEvenMarking( \
       MacroAssembler* masm);                               \
diff --git a/src/code-stubs.h b/src/code-stubs.h
index ccd2caf..7c70583 100644
--- a/src/code-stubs.h
+++ b/src/code-stubs.h
@@ -449,30 +449,6 @@ class NopRuntimeCallHelper : public RuntimeCallHelper {
 };
 
 
-class StackCheckStub : public PlatformCodeStub {
- public:
-  StackCheckStub() { }
-
-  void Generate(MacroAssembler* masm);
-
- private:
-  Major MajorKey() { return StackCheck; }
-  int MinorKey() { return 0; }
-};
-
-
-class InterruptStub : public PlatformCodeStub {
- public:
-  InterruptStub() { }
-
-  void Generate(MacroAssembler* masm);
-
- private:
-  Major MajorKey() { return Interrupt; }
-  int MinorKey() { return 0; }
-};
-
-
 class ToNumberStub: public HydrogenCodeStub {
  public:
   ToNumberStub() { }
diff --git a/src/deoptimizer.cc b/src/deoptimizer.cc
index b76da18..49d046b 100644
--- a/src/deoptimizer.cc
+++ b/src/deoptimizer.cc
@@ -2593,11 +2593,6 @@ bool Deoptimizer::DoOsrTranslateCommand(TranslationIterator* iterator,
 void Deoptimizer::PatchInterruptCode(Isolate* isolate,
                                      Code* unoptimized_code) {
   DisallowHeapAllocation no_gc;
-  // Get the interrupt stub code object to match against.  We aren't
-  // prepared to generate it, but we don't expect to have to.
-  Code* interrupt_code = NULL;
-  InterruptStub interrupt_stub;
-  CHECK(interrupt_stub.FindCodeInCache(&interrupt_code, isolate));
   Code* replacement_code =
       isolate->builtins()->builtin(Builtins::kOnStackReplacement);
 
@@ -2628,9 +2623,9 @@ void Deoptimizer::PatchInterruptCode(Isolate* isolate,
 
 void Deoptimizer::RevertInterruptCode(Isolate* isolate,
                                       Code* unoptimized_code) {
-  InterruptStub interrupt_stub;
-  Code* interrupt_code = *interrupt_stub.GetCode(isolate);
   DisallowHeapAllocation no_gc;
+  Code* interrupt_code =
+      isolate->builtins()->builtin(Builtins::kInterruptCheck);
 
   // Iterate over the back edge table and revert the patched interrupt calls.
   ASSERT(unoptimized_code->back_edges_patched_for_osr());
diff --git a/src/ia32/code-stubs-ia32.cc b/src/ia32/code-stubs-ia32.cc
index 64f36b3..42b1296 100644
--- a/src/ia32/code-stubs-ia32.cc
+++ b/src/ia32/code-stubs-ia32.cc
@@ -4229,16 +4229,6 @@ void ICCompareStub::GenerateGeneric(MacroAssembler* masm) {
 }
 
 
-void StackCheckStub::Generate(MacroAssembler* masm) {
-  __ TailCallRuntime(Runtime::kStackGuard, 0, 1);
-}
-
-
-void InterruptStub::Generate(MacroAssembler* masm) {
-  __ TailCallRuntime(Runtime::kInterrupt, 0, 1);
-}
-
-
 static void GenerateRecordCallTarget(MacroAssembler* masm) {
   // Cache the called function in a global property cell.  Cache states
   // are uninitialized, monomorphic (indicated by a JSFunction), and
diff --git a/src/ia32/deoptimizer-ia32.cc b/src/ia32/deoptimizer-ia32.cc
index f84b813..a4f7ee8 100644
--- a/src/ia32/deoptimizer-ia32.cc
+++ b/src/ia32/deoptimizer-ia32.cc
@@ -246,10 +246,9 @@ Deoptimizer::InterruptPatchState Deoptimizer::GetInterruptPatchState(
     return PATCHED_FOR_OSR;
   } else {
     // Get the interrupt stub code object to match against from cache.
-    Code* interrupt_code = NULL;
-    InterruptStub stub;
-    if (!stub.FindCodeInCache(&interrupt_code, isolate)) UNREACHABLE();
-    ASSERT_EQ(interrupt_code->entry(),
+    Code* interrupt_builtin =
+        isolate->builtins()->builtin(Builtins::kInterruptCheck);
+    ASSERT_EQ(interrupt_builtin->entry(),
               Assembler::target_address_at(call_target_address));
     ASSERT_EQ(kJnsInstruction, *(call_target_address - 3));
     ASSERT_EQ(kJnsOffset, *(call_target_address - 2));
diff --git a/src/ia32/full-codegen-ia32.cc b/src/ia32/full-codegen-ia32.cc
index 775a168..09966c9 100644
--- a/src/ia32/full-codegen-ia32.cc
+++ b/src/ia32/full-codegen-ia32.cc
@@ -288,8 +288,7 @@ void FullCodeGenerator::Generate() {
         ExternalReference::address_of_stack_limit(isolate());
     __ cmp(esp, Operand::StaticVariable(stack_limit));
     __ j(above_equal, &ok, Label::kNear);
-    StackCheckStub stub;
-    __ CallStub(&stub);
+    __ call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
     __ bind(&ok);
   }
 
@@ -347,8 +346,7 @@ void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
   }
   EmitProfilingCounterDecrement(weight);
   __ j(positive, &ok, Label::kNear);
-  InterruptStub stub;
-  __ CallStub(&stub);
+  __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
 
   // Record a mapping of this PC offset to the OSR id.  This is used to find
   // the AST id from the unoptimized code in order to use it as a key into
@@ -395,8 +393,8 @@ void FullCodeGenerator::EmitReturnSequence() {
       __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
       __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
     } else {
-      InterruptStub stub;
-      __ CallStub(&stub);
+      __ call(isolate()->builtins()->InterruptCheck(),
+              RelocInfo::CODE_TARGET);
     }
     __ pop(eax);
     EmitProfilingCounterReset();
diff --git a/src/ia32/lithium-codegen-ia32.cc b/src/ia32/lithium-codegen-ia32.cc
index e7e0327..15b0990 100644
--- a/src/ia32/lithium-codegen-ia32.cc
+++ b/src/ia32/lithium-codegen-ia32.cc
@@ -6430,8 +6430,9 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
     ASSERT(instr->context()->IsRegister());
     ASSERT(ToRegister(instr->context()).is(esi));
-    StackCheckStub stub;
-    CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
+    CallCode(isolate()->builtins()->StackCheck(),
+             RelocInfo::CODE_TARGET,
+             instr);
     EnsureSpaceForLazyDeopt();
     __ bind(&done);
     RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
diff --git a/src/mips/code-stubs-mips.cc b/src/mips/code-stubs-mips.cc
index 710901e..8b0fdb2 100644
--- a/src/mips/code-stubs-mips.cc
+++ b/src/mips/code-stubs-mips.cc
@@ -2380,16 +2380,6 @@ Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() {
 }
 
 
-void StackCheckStub::Generate(MacroAssembler* masm) {
-  __ TailCallRuntime(Runtime::kStackGuard, 0, 1);
-}
-
-
-void InterruptStub::Generate(MacroAssembler* masm) {
-  __ TailCallRuntime(Runtime::kInterrupt, 0, 1);
-}
-
-
 void MathPowStub::Generate(MacroAssembler* masm) {
   const Register base = a1;
   const Register exponent = a2;
diff --git a/src/mips/deoptimizer-mips.cc b/src/mips/deoptimizer-mips.cc
index 31d31a0..bed6e12 100644
--- a/src/mips/deoptimizer-mips.cc
+++ b/src/mips/deoptimizer-mips.cc
@@ -149,12 +149,11 @@ Deoptimizer::InterruptPatchState Deoptimizer::GetInterruptPatchState(
     return PATCHED_FOR_OSR;
   } else {
     // Get the interrupt stub code object to match against from cache.
-    Code* interrupt_code = NULL;
-    InterruptStub stub;
-    if (!stub.FindCodeInCache(&interrupt_code, isolate)) UNREACHABLE();
+    Code* interrupt_builtin =
+        isolate->builtins()->builtin(Builtins::kInterruptCheck);
     ASSERT(reinterpret_cast<uint32_t>(
         Assembler::target_address_at(pc_after - 4 * kInstrSize)) ==
-        reinterpret_cast<uint32_t>(interrupt_code->entry()));
+        reinterpret_cast<uint32_t>(interrupt_builtin->entry()));
     return NOT_PATCHED;
   }
 }
diff --git a/src/mips/full-codegen-mips.cc b/src/mips/full-codegen-mips.cc
index d59820f..37d6bcf 100644
--- a/src/mips/full-codegen-mips.cc
+++ b/src/mips/full-codegen-mips.cc
@@ -298,8 +298,7 @@ void FullCodeGenerator::Generate() {
     Label ok;
     __ LoadRoot(t0, Heap::kStackLimitRootIndex);
     __ Branch(&ok, hs, sp, Operand(t0));
-    StackCheckStub stub;
-    __ CallStub(&stub);
+    __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
     __ bind(&ok);
   }
 
@@ -369,9 +368,8 @@ void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
   EmitProfilingCounterDecrement(weight);
   __ slt(at, a3, zero_reg);
   __ beq(at, zero_reg, &ok);
-  // CallStub will emit a li t9 first, so it is safe to use the delay slot.
-  InterruptStub stub;
-  __ CallStub(&stub);
+  // Call will emit a li t9 first, so it is safe to use the delay slot.
+  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
   // Record a mapping of this PC offset to the OSR id.  This is used to find
   // the AST id from the unoptimized code in order to use it as a key into
   // the deoptimization input data found in the optimized code.
@@ -418,8 +416,8 @@ void FullCodeGenerator::EmitReturnSequence() {
       __ push(a2);
       __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
     } else {
-      InterruptStub stub;
-      __ CallStub(&stub);
+      __ Call(isolate()->builtins()->InterruptCheck(),
+              RelocInfo::CODE_TARGET);
     }
     __ pop(v0);
     EmitProfilingCounterReset();
diff --git a/src/mips/lithium-codegen-mips.cc b/src/mips/lithium-codegen-mips.cc
index d2ab06a..11aac0b 100644
--- a/src/mips/lithium-codegen-mips.cc
+++ b/src/mips/lithium-codegen-mips.cc
@@ -5670,8 +5670,9 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
     Label done;
     __ LoadRoot(at, Heap::kStackLimitRootIndex);
     __ Branch(&done, hs, sp, Operand(at));
-    StackCheckStub stub;
-    CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
+    CallCode(isolate()->builtins()->StackCheck(),
+             RelocInfo::CODE_TARGET,
+             instr);
     EnsureSpaceForLazyDeopt();
     last_lazy_deopt_pc_ = masm()->pc_offset();
     __ bind(&done);
diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc
index 9eed917..83d01a6 100644
--- a/src/x64/code-stubs-x64.cc
+++ b/src/x64/code-stubs-x64.cc
@@ -3320,16 +3320,6 @@ void ICCompareStub::GenerateGeneric(MacroAssembler* masm) {
 }
 
 
-void StackCheckStub::Generate(MacroAssembler* masm) {
-  __ TailCallRuntime(Runtime::kStackGuard, 0, 1);
-}
-
-
-void InterruptStub::Generate(MacroAssembler* masm) {
-  __ TailCallRuntime(Runtime::kInterrupt, 0, 1);
-}
-
-
 static void GenerateRecordCallTarget(MacroAssembler* masm) {
   // Cache the called function in a global property cell.  Cache states
   // are uninitialized, monomorphic (indicated by a JSFunction), and
diff --git a/src/x64/deoptimizer-x64.cc b/src/x64/deoptimizer-x64.cc
index 33d4f4d..bb8acce 100644
--- a/src/x64/deoptimizer-x64.cc
+++ b/src/x64/deoptimizer-x64.cc
@@ -151,10 +151,9 @@ Deoptimizer::InterruptPatchState Deoptimizer::GetInterruptPatchState(
     return PATCHED_FOR_OSR;
   } else {
     // Get the interrupt stub code object to match against from cache.
-    Code* interrupt_code = NULL;
-    InterruptStub stub;
-    if (!stub.FindCodeInCache(&interrupt_code, isolate)) UNREACHABLE();
-    ASSERT_EQ(interrupt_code->entry(),
+    Code* interrupt_builtin =
+        isolate->builtins()->builtin(Builtins::kInterruptCheck);
+    ASSERT_EQ(interrupt_builtin->entry(),
              Assembler::target_address_at(call_target_address));
     ASSERT_EQ(kJnsInstruction, *(call_target_address - 3));
     ASSERT_EQ(kJnsOffset, *(call_target_address - 2));
diff --git a/src/x64/full-codegen-x64.cc b/src/x64/full-codegen-x64.cc
index 04005ac..54472c2 100644
--- a/src/x64/full-codegen-x64.cc
+++ b/src/x64/full-codegen-x64.cc
@@ -280,8 +280,7 @@ void FullCodeGenerator::Generate() {
     Label ok;
     __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
     __ j(above_equal, &ok, Label::kNear);
-    StackCheckStub stub;
-    __ CallStub(&stub);
+    __ call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
     __ bind(&ok);
   }
 
@@ -341,8 +340,7 @@ void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
   }
  EmitProfilingCounterDecrement(weight);
   __ j(positive, &ok, Label::kNear);
-  InterruptStub stub;
-  __ CallStub(&stub);
+  __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
 
   // Record a mapping of this PC offset to the OSR id.  This is used to find
   // the AST id from the unoptimized code in order to use it as a key into
@@ -388,8 +386,8 @@ void FullCodeGenerator::EmitReturnSequence() {
       __ push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
      __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
     } else {
-      InterruptStub stub;
-      __ CallStub(&stub);
+      __ call(isolate()->builtins()->InterruptCheck(),
+              RelocInfo::CODE_TARGET);
     }
     __ pop(rax);
     EmitProfilingCounterReset();
diff --git a/src/x64/lithium-codegen-x64.cc b/src/x64/lithium-codegen-x64.cc
index ce66285..fd9bf18 100644
--- a/src/x64/lithium-codegen-x64.cc
+++ b/src/x64/lithium-codegen-x64.cc
@@ -5419,8 +5419,9 @@ void LCodeGen::DoStackCheck(LStackCheck* instr) {
     Label done;
     __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
     __ j(above_equal, &done, Label::kNear);
-    StackCheckStub stub;
-    CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
+    CallCode(isolate()->builtins()->StackCheck(),
+             RelocInfo::CODE_TARGET,
+             instr);
     EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
     last_lazy_deopt_pc_ = masm()->pc_offset();
     __ bind(&done);