}
-void StackCheckStub::Generate(MacroAssembler* masm) {
- __ TailCallRuntime(Runtime::kStackGuard, 0, 1);
-}
-
-
-void InterruptStub::Generate(MacroAssembler* masm) {
- __ TailCallRuntime(Runtime::kInterrupt, 0, 1);
-}
-
-
void MathPowStub::Generate(MacroAssembler* masm) {
const Register base = r1;
const Register exponent = r2;
return PATCHED_FOR_OSR;
} else {
- // Get the interrupt stub code object to match against from cache.
+ // Get the interrupt builtin code object to match against.
- Code* interrupt_code = NULL;
- InterruptStub stub;
- if (!stub.FindCodeInCache(&interrupt_code, isolate)) UNREACHABLE();
+ Code* interrupt_builtin =
+ isolate->builtins()->builtin(Builtins::kInterruptCheck);
ASSERT(Assembler::IsLdrPcImmediateOffset(
Assembler::instr_at(pc_after - 2 * kInstrSize)));
ASSERT_EQ(kBranchBeforeInterrupt,
Memory::int32_at(pc_after - 3 * kInstrSize));
- ASSERT(reinterpret_cast<uint32_t>(interrupt_code->entry()) ==
+ ASSERT(reinterpret_cast<uint32_t>(interrupt_builtin->entry()) ==
Memory::uint32_at(interrupt_address_pointer));
return NOT_PATCHED;
}
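
For readers decoding the assertions above, here is a sketch of the unpatched ARM back edge site they describe (an assumption reconstructed from the asserts; `pc_after` is the address just past the call):

    // Sketch only, not part of the patch:
    //
    //   bpl ok                  ; kBranchBeforeInterrupt, pc_after - 3 * kInstrSize
    //   ldr ip, [pc, #offset]   ; pc_after - 2 * kInstrSize; reads the
    //                           ;   InterruptCheck entry from the constant-pool
    //                           ;   slot at interrupt_address_pointer
    //   blx ip                  ; pc_after points just past this instruction
    // ok:

OSR patching rewrites this site to call the OnStackReplacement builtin instead; GetInterruptPatchState tells the two states apart by inspecting exactly these words.
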
__ cmp(sp, Operand(ip));
__ b(hs, &ok);
PredictableCodeSizeScope predictable(masm_, 2 * Assembler::kInstrSize);
- StackCheckStub stub;
- __ CallStub(&stub);
+ __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
__ bind(&ok);
}
}
EmitProfilingCounterDecrement(weight);
__ b(pl, &ok);
- InterruptStub stub;
- __ CallStub(&stub);
+ __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
// Record a mapping of this PC offset to the OSR id. This is used to find
// the AST id from the unoptimized code in order to use it as a key into
__ push(r2);
__ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
} else {
- InterruptStub stub;
- __ CallStub(&stub);
+ __ Call(isolate()->builtins()->InterruptCheck(),
+ RelocInfo::CODE_TARGET);
}
__ pop(r0);
EmitProfilingCounterReset();
__ LoadRoot(ip, Heap::kStackLimitRootIndex);
__ cmp(sp, Operand(ip));
__ b(hs, &done);
- StackCheckStub stub;
PredictableCodeSizeScope predictable(masm_, 2 * Assembler::kInstrSize);
- CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
+ CallCode(isolate()->builtins()->StackCheck(),
+ RelocInfo::CODE_TARGET,
+ instr);
EnsureSpaceForLazyDeopt();
last_lazy_deopt_pc_ = masm()->pc_offset();
__ bind(&done);
}
+void Builtins::Generate_InterruptCheck(MacroAssembler* masm) {
+ masm->TailCallRuntime(Runtime::kInterrupt, 0, 1);
+}
+
+
+void Builtins::Generate_StackCheck(MacroAssembler* masm) {
+ masm->TailCallRuntime(Runtime::kStackGuard, 0, 1);
+}
+
+
#define DEFINE_BUILTIN_ACCESSOR_C(name, ignore) \
Handle<Code> Builtins::name() { \
Code** code_address = \
\
V(OnStackReplacement, BUILTIN, UNINITIALIZED, \
Code::kNoExtraICState) \
+ V(InterruptCheck, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState) \
+ V(StackCheck, BUILTIN, UNINITIALIZED, \
+ Code::kNoExtraICState) \
CODE_AGE_LIST_WITH_ARG(DECLARE_CODE_AGE_BUILTIN, V)
#ifdef ENABLE_DEBUGGER_SUPPORT
static void Generate_StringConstructCode(MacroAssembler* masm);
static void Generate_OnStackReplacement(MacroAssembler* masm);
+ static void Generate_InterruptCheck(MacroAssembler* masm);
+ static void Generate_StackCheck(MacroAssembler* masm);
+
#define DECLARE_CODE_AGE_BUILTIN_GENERATOR(C) \
static void Generate_Make##C##CodeYoungAgainEvenMarking( \
MacroAssembler* masm); \
};
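
With the new V(...) entries and Generate_* declarations in place, the existing accessor macros take care of the rest; the DEFINE_BUILTIN_ACCESSOR_C fragment above shows the pattern. A minimal sketch of the expansion for one of the new entries, assuming the A-list accessor has the same body:

    Handle<Code> Builtins::StackCheck() {
      Code** code_address =
          reinterpret_cast<Code**>(builtin_address(kStackCheck));
      return Handle<Code>(code_address);
    }

This is the handle accessor that the `isolate()->builtins()->StackCheck()` call sites in the full-codegen and lithium hunks rely on; the deoptimizer hunks use the raw `builtin(Builtins::kInterruptCheck)` table read instead.
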
-class StackCheckStub : public PlatformCodeStub {
- public:
- StackCheckStub() { }
-
- void Generate(MacroAssembler* masm);
-
- private:
- Major MajorKey() { return StackCheck; }
- int MinorKey() { return 0; }
-};
-
-
-class InterruptStub : public PlatformCodeStub {
- public:
- InterruptStub() { }
-
- void Generate(MacroAssembler* masm);
-
- private:
- Major MajorKey() { return Interrupt; }
- int MinorKey() { return 0; }
-};
-
-
class ToNumberStub: public HydrogenCodeStub {
public:
ToNumberStub() { }
void Deoptimizer::PatchInterruptCode(Isolate* isolate,
Code* unoptimized_code) {
DisallowHeapAllocation no_gc;
- // Get the interrupt stub code object to match against. We aren't
- // prepared to generate it, but we don't expect to have to.
- Code* interrupt_code = NULL;
- InterruptStub interrupt_stub;
- CHECK(interrupt_stub.FindCodeInCache(&interrupt_code, isolate));
Code* replacement_code =
isolate->builtins()->builtin(Builtins::kOnStackReplacement);
void Deoptimizer::RevertInterruptCode(Isolate* isolate,
Code* unoptimized_code) {
- InterruptStub interrupt_stub;
- Code* interrupt_code = *interrupt_stub.GetCode(isolate);
DisallowHeapAllocation no_gc;
+ Code* interrupt_code =
+ isolate->builtins()->builtin(Builtins::kInterruptCheck);
// Iterate over the back edge table and revert the patched interrupt calls.
ASSERT(unoptimized_code->back_edges_patched_for_osr());
}
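
These two deoptimizer hunks carry the rationale for the whole patch. Before, the code object had to be retrieved from the stub cache, and the removed comment concedes the lookup was not guaranteed to succeed ("we aren't prepared to generate it"); afterwards it is a plain table read. Side by side:

    // Before: a cache lookup that could in principle fail, hence the
    // CHECK / UNREACHABLE guards in every architecture's deoptimizer.
    Code* interrupt_code = NULL;
    InterruptStub interrupt_stub;
    CHECK(interrupt_stub.FindCodeInCache(&interrupt_code, isolate));

    // After: builtins are materialized during isolate setup, so the
    // lookup cannot fail and the guards disappear.
    Code* interrupt_code =
        isolate->builtins()->builtin(Builtins::kInterruptCheck);
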
-void StackCheckStub::Generate(MacroAssembler* masm) {
- __ TailCallRuntime(Runtime::kStackGuard, 0, 1);
-}
-
-
-void InterruptStub::Generate(MacroAssembler* masm) {
- __ TailCallRuntime(Runtime::kInterrupt, 0, 1);
-}
-
-
static void GenerateRecordCallTarget(MacroAssembler* masm) {
// Cache the called function in a global property cell. Cache states
// are uninitialized, monomorphic (indicated by a JSFunction), and
return PATCHED_FOR_OSR;
} else {
- // Get the interrupt stub code object to match against from cache.
+ // Get the interrupt builtin code object to match against.
- Code* interrupt_code = NULL;
- InterruptStub stub;
- if (!stub.FindCodeInCache(&interrupt_code, isolate)) UNREACHABLE();
- ASSERT_EQ(interrupt_code->entry(),
+ Code* interrupt_builtin =
+ isolate->builtins()->builtin(Builtins::kInterruptCheck);
+ ASSERT_EQ(interrupt_builtin->entry(),
Assembler::target_address_at(call_target_address));
ASSERT_EQ(kJnsInstruction, *(call_target_address - 3));
ASSERT_EQ(kJnsOffset, *(call_target_address - 2));
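
A sketch of the unpatched ia32 back edge these assertions decode (an assumption; `call_target_address` points at the call's 32-bit operand):

    // Sketch only, not part of the patch:
    //
    //   jns ok        ; kJnsInstruction / kJnsOffset, taken while the
    //                 ;   profiling counter is still non-negative
    //   call <entry>  ; <entry> == InterruptCheck, read back via
    //                 ;   Assembler::target_address_at(call_target_address)
    // ok:

OSR patching retargets the call at the OnStackReplacement builtin; the x64 hunk near the end of the patch performs the identical check.
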
ExternalReference::address_of_stack_limit(isolate());
__ cmp(esp, Operand::StaticVariable(stack_limit));
__ j(above_equal, &ok, Label::kNear);
- StackCheckStub stub;
- __ CallStub(&stub);
+ __ call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
__ bind(&ok);
}
}
EmitProfilingCounterDecrement(weight);
__ j(positive, &ok, Label::kNear);
- InterruptStub stub;
- __ CallStub(&stub);
+ __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
// Record a mapping of this PC offset to the OSR id. This is used to find
// the AST id from the unoptimized code in order to use it as a key into
__ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
__ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
} else {
- InterruptStub stub;
- __ CallStub(&stub);
+ __ call(isolate()->builtins()->InterruptCheck(),
+ RelocInfo::CODE_TARGET);
}
__ pop(eax);
EmitProfilingCounterReset();
ASSERT(instr->context()->IsRegister());
ASSERT(ToRegister(instr->context()).is(esi));
- StackCheckStub stub;
- CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
+ CallCode(isolate()->builtins()->StackCheck(),
+ RelocInfo::CODE_TARGET,
+ instr);
EnsureSpaceForLazyDeopt();
__ bind(&done);
RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
}
-void StackCheckStub::Generate(MacroAssembler* masm) {
- __ TailCallRuntime(Runtime::kStackGuard, 0, 1);
-}
-
-
-void InterruptStub::Generate(MacroAssembler* masm) {
- __ TailCallRuntime(Runtime::kInterrupt, 0, 1);
-}
-
-
void MathPowStub::Generate(MacroAssembler* masm) {
const Register base = a1;
const Register exponent = a2;
return PATCHED_FOR_OSR;
} else {
- // Get the interrupt stub code object to match against from cache.
+ // Get the interrupt builtin code object to match against.
- Code* interrupt_code = NULL;
- InterruptStub stub;
- if (!stub.FindCodeInCache(&interrupt_code, isolate)) UNREACHABLE();
+ Code* interrupt_builtin =
+ isolate->builtins()->builtin(Builtins::kInterruptCheck);
ASSERT(reinterpret_cast<uint32_t>(
Assembler::target_address_at(pc_after - 4 * kInstrSize)) ==
- reinterpret_cast<uint32_t>(interrupt_code->entry()));
+ reinterpret_cast<uint32_t>(interrupt_builtin->entry()));
return NOT_PATCHED;
}
}
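
On MIPS the matching sketch (an assumption; a far call is four instructions because `li t9` expands to a lui/ori pair, which is why the full-codegen hunk below can park the first of them in the branch delay slot):

    // Sketch only, not part of the patch:
    //
    //   slt  at, a3, zero_reg
    //   beq  at, zero_reg, ok  ; taken while the counter is non-negative
    //   lui  t9, hi(entry)     ; delay slot; entry == InterruptCheck, read back
    //   ori  t9, t9, lo(entry) ;   via target_address_at(pc_after - 4 * kInstrSize)
    //   jalr t9
    //   nop                    ; jalr delay slot; pc_after points past here
    // ok:
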
Label ok;
__ LoadRoot(t0, Heap::kStackLimitRootIndex);
__ Branch(&ok, hs, sp, Operand(t0));
- StackCheckStub stub;
- __ CallStub(&stub);
+ __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
__ bind(&ok);
}
EmitProfilingCounterDecrement(weight);
__ slt(at, a3, zero_reg);
__ beq(at, zero_reg, &ok);
- // CallStub will emit a li t9 first, so it is safe to use the delay slot.
- InterruptStub stub;
- __ CallStub(&stub);
+ // Call will emit a li t9 first, so it is safe to use the delay slot.
+ __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
// Record a mapping of this PC offset to the OSR id. This is used to find
// the AST id from the unoptimized code in order to use it as a key into
// the deoptimization input data found in the optimized code.
__ push(a2);
__ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
} else {
- InterruptStub stub;
- __ CallStub(&stub);
+ __ Call(isolate()->builtins()->InterruptCheck(),
+ RelocInfo::CODE_TARGET);
}
__ pop(v0);
EmitProfilingCounterReset();
Label done;
__ LoadRoot(at, Heap::kStackLimitRootIndex);
__ Branch(&done, hs, sp, Operand(at));
- StackCheckStub stub;
- CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
+ CallCode(isolate()->builtins()->StackCheck(),
+ RelocInfo::CODE_TARGET,
+ instr);
EnsureSpaceForLazyDeopt();
last_lazy_deopt_pc_ = masm()->pc_offset();
__ bind(&done);
}
-void StackCheckStub::Generate(MacroAssembler* masm) {
- __ TailCallRuntime(Runtime::kStackGuard, 0, 1);
-}
-
-
-void InterruptStub::Generate(MacroAssembler* masm) {
- __ TailCallRuntime(Runtime::kInterrupt, 0, 1);
-}
-
-
static void GenerateRecordCallTarget(MacroAssembler* masm) {
// Cache the called function in a global property cell. Cache states
// are uninitialized, monomorphic (indicated by a JSFunction), and
return PATCHED_FOR_OSR;
} else {
- // Get the interrupt stub code object to match against from cache.
+ // Get the interrupt builtin code object to match against.
- Code* interrupt_code = NULL;
- InterruptStub stub;
- if (!stub.FindCodeInCache(&interrupt_code, isolate)) UNREACHABLE();
- ASSERT_EQ(interrupt_code->entry(),
+ Code* interrupt_builtin =
+ isolate->builtins()->builtin(Builtins::kInterruptCheck);
+ ASSERT_EQ(interrupt_builtin->entry(),
Assembler::target_address_at(call_target_address));
ASSERT_EQ(kJnsInstruction, *(call_target_address - 3));
ASSERT_EQ(kJnsOffset, *(call_target_address - 2));
Label ok;
__ CompareRoot(rsp, Heap::kStackLimitRootIndex);
__ j(above_equal, &ok, Label::kNear);
- StackCheckStub stub;
- __ CallStub(&stub);
+ __ call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
__ bind(&ok);
}
}
EmitProfilingCounterDecrement(weight);
__ j(positive, &ok, Label::kNear);
- InterruptStub stub;
- __ CallStub(&stub);
+ __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
// Record a mapping of this PC offset to the OSR id. This is used to find
// the AST id from the unoptimized code in order to use it as a key into
__ push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
__ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
} else {
- InterruptStub stub;
- __ CallStub(&stub);
+ __ call(isolate()->builtins()->InterruptCheck(),
+ RelocInfo::CODE_TARGET);
}
__ pop(rax);
EmitProfilingCounterReset();
Label done;
__ CompareRoot(rsp, Heap::kStackLimitRootIndex);
__ j(above_equal, &done, Label::kNear);
- StackCheckStub stub;
- CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
+ CallCode(isolate()->builtins()->StackCheck(),
+ RelocInfo::CODE_TARGET,
+ instr);
EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
last_lazy_deopt_pc_ = masm()->pc_offset();
__ bind(&done);