Isolate* isolate = masm_->isolate();
Label ok;
ASSERT(scratch.is(sp) == (pointers == 0));
+ Heap::RootListIndex index;
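+ // When room is reserved for locals that have not been pushed yet,
+ // compare against the real stack limit: the regular limit may be
+ // lowered at any time to trigger an interrupt, and a pending
+ // interrupt must not look like a stack overflow here.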
if (pointers != 0) {
__ sub(scratch, sp, Operand(pointers * kPointerSize));
+ index = Heap::kRealStackLimitRootIndex;
+ } else {
+ index = Heap::kStackLimitRootIndex;
}
- __ LoadRoot(stack_limit_scratch, Heap::kStackLimitRootIndex);
+ __ LoadRoot(stack_limit_scratch, index);
__ cmp(scratch, Operand(stack_limit_scratch));
__ b(hs, &ok);
PredictableCodeSizeScope predictable(masm_, 2 * Assembler::kInstrSize);
Label ok;
ASSERT(jssp.Is(__ StackPointer()));
ASSERT(scratch.Is(jssp) == (pointers == 0));
+ Heap::RootListIndex index;
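+ // As above: a reserved margin is checked against the real limit,
+ // since the regular limit doubles as the interrupt request mechanism.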
if (pointers != 0) {
__ Sub(scratch, jssp, pointers * kPointerSize);
+ index = Heap::kRealStackLimitRootIndex;
+ } else {
+ index = Heap::kStackLimitRootIndex;
}
- __ CompareRoot(scratch, Heap::kStackLimitRootIndex);
+ __ CompareRoot(scratch, index);
__ B(hs, &ok);
PredictableCodeSizeScope predictable(masm_,
Assembler::kCallSizeWithRelocation);
Register scratch = esp) {
Label ok;
Isolate* isolate = masm_->isolate();
- ExternalReference stack_limit =
- ExternalReference::address_of_stack_limit(isolate);
ASSERT(scratch.is(esp) == (pointers == 0));
+ ExternalReference stack_limit;
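+ // Pick the real stack limit when reserving room for locals; the
+ // regular limit may have been lowered to trigger an interrupt.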
if (pointers != 0) {
__ mov(scratch, esp);
__ sub(scratch, Immediate(pointers * kPointerSize));
+ stack_limit = ExternalReference::address_of_real_stack_limit(isolate);
+ } else {
+ stack_limit = ExternalReference::address_of_stack_limit(isolate);
}
__ cmp(scratch, Operand::StaticVariable(stack_limit));
__ j(above_equal, &ok, Label::kNear);
Isolate* isolate = masm_->isolate();
Label ok;
ASSERT(scratch.is(sp) == (pointers == 0));
+ Heap::RootListIndex index;
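+ // Same rationale as on the other architectures: check the margin
+ // against the real limit, not the interrupt-triggering one.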
if (pointers != 0) {
__ Subu(scratch, sp, Operand(pointers * kPointerSize));
+ index = Heap::kRealStackLimitRootIndex;
+ } else {
+ index = Heap::kStackLimitRootIndex;
}
- __ LoadRoot(stack_limit_scratch, Heap::kStackLimitRootIndex);
+ __ LoadRoot(stack_limit_scratch, index);
__ Branch(&ok, hs, scratch, Operand(stack_limit_scratch));
PredictableCodeSizeScope predictable(masm_, 4 * Assembler::kInstrSize);
__ Call(isolate->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
Isolate* isolate = masm_->isolate();
Label ok;
ASSERT(scratch.is(rsp) == (pointers == 0));
+ Heap::RootListIndex index;
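+ // A margin for unpushed locals is checked against the real limit so
+ // a pending interrupt is not mistaken for a stack overflow.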
if (pointers != 0) {
__ movp(scratch, rsp);
__ subp(scratch, Immediate(pointers * kPointerSize));
+ index = Heap::kRealStackLimitRootIndex;
+ } else {
+ index = Heap::kStackLimitRootIndex;
}
- __ CompareRoot(scratch, Heap::kStackLimitRootIndex);
+ __ CompareRoot(scratch, index);
__ j(above_equal, &ok, Label::kNear);
__ call(isolate->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
__ bind(&ok);
}
#endif // DEBUG
+
+
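+ // No-op interrupt callback: merely requesting the interrupt lowers
+ // the stack limit until the interrupt is serviced.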
+static void InterruptCallback357137(v8::Isolate* isolate, void* data) { }
+
+
+static void RequestInterrupt(const v8::FunctionCallbackInfo<v8::Value>& args) {
+ CcTest::isolate()->RequestInterrupt(&InterruptCallback357137, NULL);
+}
+
+
+TEST(Regress357137) {
+ CcTest::InitializeVM();
+ v8::Isolate* isolate = CcTest::isolate();
+ v8::HandleScope hscope(isolate);
+ v8::Handle<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
+ global->Set(v8::String::NewFromUtf8(isolate, "interrupt"),
+ v8::FunctionTemplate::New(isolate, RequestInterrupt));
+ v8::Local<v8::Context> context = v8::Context::New(isolate, NULL, global);
+ ASSERT(!context.IsEmpty());
+ v8::Context::Scope cscope(context);
+
+ v8::Local<v8::Value> result = CompileRun(
+ "var locals = '';"
+ "for (var i = 0; i < 512; i++) locals += 'var v' + i + '= 42;';"
+ "eval('function f() {' + locals + 'return function() { return v0; }; }');"
+ "interrupt();" // This triggers a fake stack overflow in f.
+ "f()()");
+ CHECK_EQ(42.0, result->ToNumber()->Value());
+}
--- /dev/null
+// Copyright 2014 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
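+ // f allocates enough locals to take the reserved-margin stack check
+ // and then recurses; a genuine overflow must still throw a RangeError.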
+var locals = "";
+for (var i = 0; i < 1024; i++) locals += "var v" + i + ";";
+eval("function f() {" + locals + "f();}");
+assertThrows("f()", RangeError);