Remove --check-stack flag from V8.
author     whesse@chromium.org <whesse@chromium.org@ce2b1a6d-e550-0410-aec6-3dcde31c8c00>
           Tue, 27 Oct 2009 14:56:50 +0000 (14:56 +0000)
committer  whesse@chromium.org <whesse@chromium.org@ce2b1a6d-e550-0410-aec6-3dcde31c8c00>
           Tue, 27 Oct 2009 14:56:50 +0000 (14:56 +0000)
Review URL: http://codereview.chromium.org/338017

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@3149 ce2b1a6d-e550-0410-aec6-3dcde31c8c00

src/arm/codegen-arm.cc
src/arm/fast-codegen-arm.cc
src/arm/regexp-macro-assembler-arm.cc
src/arm/virtual-frame-arm.cc
src/flag-definitions.h
src/ia32/builtins-ia32.cc
src/ia32/codegen-ia32.cc
src/ia32/regexp-macro-assembler-ia32.cc
src/x64/builtins-x64.cc
src/x64/codegen-x64.cc
src/x64/regexp-macro-assembler-x64.cc

diff --git a/src/arm/codegen-arm.cc b/src/arm/codegen-arm.cc
index 47f0e96..dd88515 100644
@@ -1122,22 +1122,20 @@ void CodeGenerator::Branch(bool if_true, JumpTarget* target) {
 
 void CodeGenerator::CheckStack() {
   VirtualFrame::SpilledScope spilled_scope;
-  if (FLAG_check_stack) {
-    Comment cmnt(masm_, "[ check stack");
-    __ LoadRoot(ip, Heap::kStackLimitRootIndex);
-    // Put the lr setup instruction in the delay slot.  kInstrSize is added to
-    // the implicit 8 byte offset that always applies to operations with pc and
-    // gives a return address 12 bytes down.
-    masm_->add(lr, pc, Operand(Assembler::kInstrSize));
-    masm_->cmp(sp, Operand(ip));
-    StackCheckStub stub;
-    // Call the stub if lower.
-    masm_->mov(pc,
-               Operand(reinterpret_cast<intptr_t>(stub.GetCode().location()),
-                       RelocInfo::CODE_TARGET),
-               LeaveCC,
-               lo);
-  }
+  Comment cmnt(masm_, "[ check stack");
+  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
+  // Put the lr setup instruction in the delay slot.  kInstrSize is added to
+  // the implicit 8 byte offset that always applies to operations with pc and
+  // gives a return address 12 bytes down.
+  masm_->add(lr, pc, Operand(Assembler::kInstrSize));
+  masm_->cmp(sp, Operand(ip));
+  StackCheckStub stub;
+  // Call the stub if lower.
+  masm_->mov(pc,
+             Operand(reinterpret_cast<intptr_t>(stub.GetCode().location()),
+                     RelocInfo::CODE_TARGET),
+             LeaveCC,
+             lo);
 }
 
 
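A note on the ARM sequence above: reading pc yields the address of the current instruction plus 8, so adding Assembler::kInstrSize (4 bytes) makes lr point just past the conditional mov to pc, which is exactly the stub's return address. The standalone sketch below only verifies that arithmetic; the constants are the standard ARM values, not anything taken from this patch.

    // Illustrative sketch only: checks the "return address 12 bytes down"
    // comment in the sequence above, assuming the usual ARM rule that reading
    // pc yields the current instruction's address plus 8.
    #include <cstdio>

    int main() {
      const int kInstrSize = 4;   // size of one ARM instruction
      const int kPcOffset = 8;    // pc reads as "current instruction + 8"

      int add_lr = 0;                         // add lr, pc, #kInstrSize
      int cmp_sp = add_lr + kInstrSize;       // cmp sp, ip
      int mov_pc = cmp_sp + kInstrSize;       // mov pc, #stub, lo  (the call)
      int return_site = mov_pc + kInstrSize;  // where the stub should return

      int lr = add_lr + kPcOffset + kInstrSize;  // value written into lr
      std::printf("lr = +%d bytes, return site = +%d bytes\n", lr, return_site);
      return (lr == return_site && lr == 12) ? 0 : 1;
    }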
diff --git a/src/arm/fast-codegen-arm.cc b/src/arm/fast-codegen-arm.cc
index 757a03f..bb5edc6 100644
@@ -63,28 +63,25 @@ void FastCodeGenerator::Generate(FunctionLiteral* fun) {
     if (locals_count > 0) {
       __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
     }
-    if (FLAG_check_stack) {
-      __ LoadRoot(r2, Heap::kStackLimitRootIndex);
-    }
+    __ LoadRoot(r2, Heap::kStackLimitRootIndex);
     for (int i = 0; i < locals_count; i++) {
       __ push(ip);
     }
   }
 
-  if (FLAG_check_stack) {
-    // Put the lr setup instruction in the delay slot.  The kInstrSize is
-    // added to the implicit 8 byte offset that always applies to operations
-    // with pc and gives a return address 12 bytes down.
-    Comment cmnt(masm_, "[ Stack check");
-    __ add(lr, pc, Operand(Assembler::kInstrSize));
-    __ cmp(sp, Operand(r2));
-    StackCheckStub stub;
-    __ mov(pc,
-           Operand(reinterpret_cast<intptr_t>(stub.GetCode().location()),
-                   RelocInfo::CODE_TARGET),
-           LeaveCC,
-           lo);
-  }
+  // Check the stack for overflow or a break request.
+  // Put the lr setup instruction in the delay slot.  The kInstrSize is
+  // added to the implicit 8 byte offset that always applies to operations
+  // with pc and gives a return address 12 bytes down.
+  Comment cmnt(masm_, "[ Stack check");
+  __ add(lr, pc, Operand(Assembler::kInstrSize));
+  __ cmp(sp, Operand(r2));
+  StackCheckStub stub;
+  __ mov(pc,
+         Operand(reinterpret_cast<intptr_t>(stub.GetCode().location()),
+                 RelocInfo::CODE_TARGET),
+         LeaveCC,
+         lo);
 
   { Comment cmnt(masm_, "[ Declarations");
     VisitDeclarations(fun->scope()->declarations());
diff --git a/src/arm/regexp-macro-assembler-arm.cc b/src/arm/regexp-macro-assembler-arm.cc
index addc07d..bd50428 100644
@@ -1100,14 +1100,12 @@ void RegExpMacroAssemblerARM::CheckPreemption() {
 
 
 void RegExpMacroAssemblerARM::CheckStackLimit() {
-  if (FLAG_check_stack) {
-    ExternalReference stack_limit =
-        ExternalReference::address_of_regexp_stack_limit();
-    __ mov(r0, Operand(stack_limit));
-    __ ldr(r0, MemOperand(r0));
-    __ cmp(backtrack_stackpointer(), Operand(r0));
-    SafeCall(&stack_overflow_label_, ls);
-  }
+  ExternalReference stack_limit =
+      ExternalReference::address_of_regexp_stack_limit();
+  __ mov(r0, Operand(stack_limit));
+  __ ldr(r0, MemOperand(r0));
+  __ cmp(backtrack_stackpointer(), Operand(r0));
+  SafeCall(&stack_overflow_label_, ls);
 }
 
 
diff --git a/src/arm/virtual-frame-arm.cc b/src/arm/virtual-frame-arm.cc
index 97d164e..47ecb96 100644
@@ -146,29 +146,27 @@ void VirtualFrame::AllocateStackSlots() {
       // Initialize stack slots with 'undefined' value.
     __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
   }
-  if (FLAG_check_stack) {
-    __ LoadRoot(r2, Heap::kStackLimitRootIndex);
-  }
+  __ LoadRoot(r2, Heap::kStackLimitRootIndex);
   for (int i = 0; i < count; i++) {
     __ push(ip);
   }
-  if (FLAG_check_stack) {
-    // Put the lr setup instruction in the delay slot.  The kInstrSize is added
-    // to the implicit 8 byte offset that always applies to operations with pc
-    // and gives a return address 12 bytes down.
-    masm()->add(lr, pc, Operand(Assembler::kInstrSize));
-    masm()->cmp(sp, Operand(r2));
-    StackCheckStub stub;
-    // Call the stub if lower.
-    masm()->mov(pc,
-                Operand(reinterpret_cast<intptr_t>(stub.GetCode().location()),
-                        RelocInfo::CODE_TARGET),
-                LeaveCC,
-                lo);
-  }
+  // Check the stack for overflow or a break request.
+  // Put the lr setup instruction in the delay slot.  The kInstrSize is added
+  // to the implicit 8 byte offset that always applies to operations with pc
+  // and gives a return address 12 bytes down.
+  masm()->add(lr, pc, Operand(Assembler::kInstrSize));
+  masm()->cmp(sp, Operand(r2));
+  StackCheckStub stub;
+  // Call the stub if lower.
+  masm()->mov(pc,
+              Operand(reinterpret_cast<intptr_t>(stub.GetCode().location()),
+                      RelocInfo::CODE_TARGET),
+              LeaveCC,
+              lo);
 }
 
 
+
 void VirtualFrame::SaveContextRegister() {
   UNIMPLEMENTED();
 }
diff --git a/src/flag-definitions.h b/src/flag-definitions.h
index 42c96b6..1ceb672 100644
@@ -132,8 +132,6 @@ DEFINE_bool(stack_trace_on_abort, true,
 // codegen-ia32.cc / codegen-arm.cc
 DEFINE_bool(trace, false, "trace function calls")
 DEFINE_bool(defer_negation, true, "defer negation operation")
-DEFINE_bool(check_stack, true,
-            "check stack for overflow, interrupt, breakpoint")
 
 // codegen.cc
 DEFINE_bool(lazy, true, "use lazy compilation")
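For context on why the code generators change together with this header: DEFINE_bool declares a FLAG_<name> boolean that the generators test while emitting code, so removing the definition is what forces every FLAG_check_stack guard in the surrounding files to go as well. A minimal sketch of that flag pattern follows; it assumes nothing about V8's real flags.cc machinery (no command-line parsing, no --help text).

    // Minimal sketch of a DEFINE_bool-style flag, for illustration only;
    // V8's actual implementation also registers the flag for command-line
    // parsing and help output, which this sketch drops.
    #include <cstdio>

    #define DEFINE_BOOL_SKETCH(name, default_value, comment) \
      static bool FLAG_##name = default_value;

    DEFINE_BOOL_SKETCH(trace, false, "trace function calls")
    DEFINE_BOOL_SKETCH(lazy, true, "use lazy compilation")

    int main() {
      std::printf("FLAG_trace=%d FLAG_lazy=%d\n", FLAG_trace, FLAG_lazy);
      return 0;
    }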
diff --git a/src/ia32/builtins-ia32.cc b/src/ia32/builtins-ia32.cc
index ad44026..4381b22 100644
@@ -520,48 +520,48 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
   __ push(Operand(ebp, 2 * kPointerSize));  // push arguments
   __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
 
-  if (FLAG_check_stack) {
-    // We need to catch preemptions right here, otherwise an unlucky preemption
-    // could show up as a failed apply.
-    ExternalReference stack_guard_limit =
-        ExternalReference::address_of_stack_guard_limit();
-    Label retry_preemption;
-    Label no_preemption;
-    __ bind(&retry_preemption);
-    __ mov(edi, Operand::StaticVariable(stack_guard_limit));
-    __ cmp(esp, Operand(edi));
-    __ j(above, &no_preemption, taken);
-
-    // Preemption!
-    // Because builtins always remove the receiver from the stack, we
-    // have to fake one to avoid underflowing the stack.
-    __ push(eax);
-    __ push(Immediate(Smi::FromInt(0)));
+  // Check the stack for overflow or a break request.
+  // We need to catch preemptions right here, otherwise an unlucky preemption
+  // could show up as a failed apply.
+  ExternalReference stack_guard_limit =
+      ExternalReference::address_of_stack_guard_limit();
+  Label retry_preemption;
+  Label no_preemption;
+  __ bind(&retry_preemption);
+  __ mov(edi, Operand::StaticVariable(stack_guard_limit));
+  __ cmp(esp, Operand(edi));
+  __ j(above, &no_preemption, taken);
+
+  // Preemption!
+  // Because builtins always remove the receiver from the stack, we
+  // have to fake one to avoid underflowing the stack.
+  __ push(eax);
+  __ push(Immediate(Smi::FromInt(0)));
 
-    // Do call to runtime routine.
-    __ CallRuntime(Runtime::kStackGuard, 1);
-    __ pop(eax);
-    __ jmp(&retry_preemption);
-
-    __ bind(&no_preemption);
-
-    Label okay;
-    // Make ecx the space we have left.
-    __ mov(ecx, Operand(esp));
-    __ sub(ecx, Operand(edi));
-    // Make edx the space we need for the array when it is unrolled onto the
-    // stack.
-    __ mov(edx, Operand(eax));
-    __ shl(edx, kPointerSizeLog2 - kSmiTagSize);
-    __ cmp(ecx, Operand(edx));
-    __ j(greater, &okay, taken);
-
-    // Too bad: Out of stack space.
-    __ push(Operand(ebp, 4 * kPointerSize));  // push this
-    __ push(eax);
-    __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
-    __ bind(&okay);
-  }
+  // Do call to runtime routine.
+  __ CallRuntime(Runtime::kStackGuard, 1);
+  __ pop(eax);
+  __ jmp(&retry_preemption);
+
+  __ bind(&no_preemption);
+
+  Label okay;
+  // Make ecx the space we have left.
+  __ mov(ecx, Operand(esp));
+  __ sub(ecx, Operand(edi));
+  // Make edx the space we need for the array when it is unrolled onto the
+  // stack.
+  __ mov(edx, Operand(eax));
+  __ shl(edx, kPointerSizeLog2 - kSmiTagSize);
+  __ cmp(ecx, Operand(edx));
+  __ j(greater, &okay, taken);
+
+  // Too bad: Out of stack space.
+  __ push(Operand(ebp, 4 * kPointerSize));  // push this
+  __ push(eax);
+  __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
+  __ bind(&okay);
+  // End of stack check.
 
   // Push current index and limit.
   const int kLimitOffset =
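The block above, now emitted unconditionally, first loops through Runtime::kStackGuard while a preemption is pending and then checks that the room between esp and the guard limit can hold the unrolled argument array; eax holds the length as a smi, hence the shift by kPointerSizeLog2 - kSmiTagSize rather than a plain multiply. A plain C++ sketch of that size arithmetic follows, using made-up example values rather than anything from V8.

    // Illustrative sketch of the ecx/edx space computation above; the
    // addresses and argument count are assumed example values.
    #include <cstdint>
    #include <cstdio>

    int main() {
      const unsigned kPointerSizeLog2 = 2;  // 4-byte pointers on ia32
      const unsigned kSmiTagSize = 1;

      uint32_t esp = 0x00200000;                // assumed stack pointer
      uint32_t stack_guard_limit = 0x001f0000;  // assumed guard limit
      uint32_t eax = 1000u << kSmiTagSize;      // argument count, smi-encoded

      uint32_t ecx = esp - stack_guard_limit;                  // space left
      uint32_t edx = eax << (kPointerSizeLog2 - kSmiTagSize);  // space needed
      std::printf("left=%u needed=%u -> %s\n", (unsigned)ecx, (unsigned)edx,
                  ecx > edx ? "okay" : "APPLY_OVERFLOW");
      return 0;
    }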
diff --git a/src/ia32/codegen-ia32.cc b/src/ia32/codegen-ia32.cc
index 003876b..4ac5527 100644
@@ -2203,14 +2203,12 @@ void DeferredStackCheck::Generate() {
 
 
 void CodeGenerator::CheckStack() {
-  if (FLAG_check_stack) {
-    DeferredStackCheck* deferred = new DeferredStackCheck;
-    ExternalReference stack_guard_limit =
-        ExternalReference::address_of_stack_guard_limit();
-    __ cmp(esp, Operand::StaticVariable(stack_guard_limit));
-    deferred->Branch(below);
-    deferred->BindExit();
-  }
+  DeferredStackCheck* deferred = new DeferredStackCheck;
+  ExternalReference stack_guard_limit =
+      ExternalReference::address_of_stack_guard_limit();
+  __ cmp(esp, Operand::StaticVariable(stack_guard_limit));
+  deferred->Branch(below);
+  deferred->BindExit();
 }
 
 
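At run time the unconditional CheckStack above reduces to the comparison sketched below; the DeferredStackCheck object only moves the runtime call out of the hot path. The names in the sketch are illustrative, not V8 API.

    // What the emitted ia32 check amounts to at run time (illustrative
    // sketch, not V8 code): compare the stack pointer against the guard
    // limit and take the slow path only when the limit has been reached or
    // lowered by a preemption or debug-break request.
    #include <cstdint>

    static uintptr_t stack_guard_limit = 0x10000;  // assumed limit value

    static void StackGuardSlowPath() { /* handle interrupt / overflow here */ }

    inline void CheckStack(uintptr_t stack_pointer) {
      if (stack_pointer < stack_guard_limit) {  // "below" in the unsigned cmp
        StackGuardSlowPath();                   // DeferredStackCheck's call
      }
      // Fall through: BindExit() in the generator.
    }

    int main() {
      CheckStack(0x20000);  // plenty of room: no slow path
      CheckStack(0x8000);   // under the limit: slow path taken
      return 0;
    }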
diff --git a/src/ia32/regexp-macro-assembler-ia32.cc b/src/ia32/regexp-macro-assembler-ia32.cc
index b878550..76d36a9 100644
@@ -1093,17 +1093,15 @@ void RegExpMacroAssemblerIA32::CheckPreemption() {
 
 
 void RegExpMacroAssemblerIA32::CheckStackLimit() {
-  if (FLAG_check_stack) {
-    Label no_stack_overflow;
-    ExternalReference stack_limit =
-        ExternalReference::address_of_regexp_stack_limit();
-    __ cmp(backtrack_stackpointer(), Operand::StaticVariable(stack_limit));
-    __ j(above, &no_stack_overflow);
+  Label no_stack_overflow;
+  ExternalReference stack_limit =
+      ExternalReference::address_of_regexp_stack_limit();
+  __ cmp(backtrack_stackpointer(), Operand::StaticVariable(stack_limit));
+  __ j(above, &no_stack_overflow);
 
-    SafeCall(&stack_overflow_label_);
+  SafeCall(&stack_overflow_label_);
 
-    __ bind(&no_stack_overflow);
-  }
+  __ bind(&no_stack_overflow);
 }
 
 
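CheckStackLimit above guards the regexp engine's separate backtrack stack, not the machine stack: the code jumps past SafeCall only when backtrack_stackpointer() is strictly above the limit, so an equal or lower pointer takes the overflow path. A hedged C++ rendering of that run-time behaviour, with illustrative names:

    // Run-time effect of CheckStackLimit, as a plain C++ sketch (names are
    // illustrative, not V8 API): the backtrack stack grows downwards, so
    // reaching the limit means it must be grown or the match must fail.
    #include <cstdint>

    static uintptr_t regexp_stack_limit = 0x1000;  // assumed limit

    static void HandleBacktrackStackOverflow() { /* grow stack or throw */ }

    inline void CheckStackLimit(uintptr_t backtrack_sp) {
      if (backtrack_sp <= regexp_stack_limit) {  // "not above" in the asm
        HandleBacktrackStackOverflow();          // SafeCall(&stack_overflow_label_)
      }
    }

    int main() {
      CheckStackLimit(0x2000);  // fine
      CheckStackLimit(0x1000);  // at the limit: overflow path
      return 0;
    }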
diff --git a/src/x64/builtins-x64.cc b/src/x64/builtins-x64.cc
index 01992ce..ea6c379 100644
@@ -318,47 +318,47 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
   __ push(Operand(rbp, kArgumentsOffset));
   __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
 
-  if (FLAG_check_stack) {
-    // We need to catch preemptions right here, otherwise an unlucky preemption
-    // could show up as a failed apply.
-    Label retry_preemption;
-    Label no_preemption;
-    __ bind(&retry_preemption);
-    ExternalReference stack_guard_limit =
-        ExternalReference::address_of_stack_guard_limit();
-    __ movq(kScratchRegister, stack_guard_limit);
-    __ movq(rcx, rsp);
-    __ subq(rcx, Operand(kScratchRegister, 0));
-    // rcx contains the difference between the stack limit and the stack top.
-    // We use it below to check that there is enough room for the arguments.
-    __ j(above, &no_preemption);
-
-    // Preemption!
-    // Because runtime functions always remove the receiver from the stack, we
-    // have to fake one to avoid underflowing the stack.
-    __ push(rax);
-    __ Push(Smi::FromInt(0));
+  // Check the stack for overflow or a break request.
+  // We need to catch preemptions right here, otherwise an unlucky preemption
+  // could show up as a failed apply.
+  Label retry_preemption;
+  Label no_preemption;
+  __ bind(&retry_preemption);
+  ExternalReference stack_guard_limit =
+      ExternalReference::address_of_stack_guard_limit();
+  __ movq(kScratchRegister, stack_guard_limit);
+  __ movq(rcx, rsp);
+  __ subq(rcx, Operand(kScratchRegister, 0));
+  // rcx contains the difference between the stack limit and the stack top.
+  // We use it below to check that there is enough room for the arguments.
+  __ j(above, &no_preemption);
+
+  // Preemption!
+  // Because runtime functions always remove the receiver from the stack, we
+  // have to fake one to avoid underflowing the stack.
+  __ push(rax);
+  __ Push(Smi::FromInt(0));
 
-    // Do call to runtime routine.
-    __ CallRuntime(Runtime::kStackGuard, 1);
-    __ pop(rax);
-    __ jmp(&retry_preemption);
+  // Do call to runtime routine.
+  __ CallRuntime(Runtime::kStackGuard, 1);
+  __ pop(rax);
+  __ jmp(&retry_preemption);
 
-    __ bind(&no_preemption);
+  __ bind(&no_preemption);
 
-    Label okay;
-    // Make rdx the space we need for the array when it is unrolled onto the
-    // stack.
-    __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2);
-    __ cmpq(rcx, rdx);
-    __ j(greater, &okay);
+  Label okay;
+  // Make rdx the space we need for the array when it is unrolled onto the
+  // stack.
+  __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2);
+  __ cmpq(rcx, rdx);
+  __ j(greater, &okay);
 
-    // Too bad: Out of stack space.
-    __ push(Operand(rbp, kFunctionOffset));
-    __ push(rax);
-    __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
-    __ bind(&okay);
-  }
+  // Too bad: Out of stack space.
+  __ push(Operand(rbp, kFunctionOffset));
+  __ push(rax);
+  __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
+  __ bind(&okay);
+  // End of stack check.
 
   // Push current index and limit.
   const int kLimitOffset =
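The x64 version above differs from the ia32 one in a small way: a single subq computes rcx = rsp - stack_guard_limit, the flags from that subtraction decide the preemption branch ("above" means rsp is still above the limit), and rcx is then reused as the space-left value for the array-size comparison. The sketch below mirrors that flow; all values are assumed examples, not V8 data.

    // Illustrative sketch (not V8 code) of the x64 flow: one unsigned
    // subtraction both decides the preemption branch and leaves the
    // space-left value behind for the later comparison.
    #include <cstdint>
    #include <cstdio>

    int main() {
      uint64_t rsp = 0x7fff00020000ull;                // assumed stack pointer
      uint64_t stack_guard_limit = 0x7fff00010000ull;  // assumed guard limit
      uint64_t argc = 1000;                            // argument count (untagged)
      const unsigned kPointerSizeLog2 = 3;             // 8-byte pointers on x64

      uint64_t rcx = rsp - stack_guard_limit;       // subq: borrow iff rsp < limit
      bool preempted = !(rsp > stack_guard_limit);  // j(above, &no_preemption)
      uint64_t rdx = argc << kPointerSizeLog2;      // space the unrolled array needs

      std::printf("preempted=%d left=%llu needed=%llu -> %s\n", preempted,
                  (unsigned long long)rcx, (unsigned long long)rdx,
                  rcx > rdx ? "okay" : "APPLY_OVERFLOW");
      return 0;
    }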
diff --git a/src/x64/codegen-x64.cc b/src/x64/codegen-x64.cc
index 33c06f7..95f30d8 100644
@@ -852,12 +852,10 @@ void DeferredStackCheck::Generate() {
 
 
 void CodeGenerator::CheckStack() {
-  if (FLAG_check_stack) {
-    DeferredStackCheck* deferred = new DeferredStackCheck;
-    __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
-    deferred->Branch(below);
-    deferred->BindExit();
-  }
+  DeferredStackCheck* deferred = new DeferredStackCheck;
+  __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
+  deferred->Branch(below);
+  deferred->BindExit();
 }
 
 
diff --git a/src/x64/regexp-macro-assembler-x64.cc b/src/x64/regexp-macro-assembler-x64.cc
index d8dcc37..88636f8 100644
@@ -1209,18 +1209,16 @@ void RegExpMacroAssemblerX64::CheckPreemption() {
 
 
 void RegExpMacroAssemblerX64::CheckStackLimit() {
-  if (FLAG_check_stack) {
-    Label no_stack_overflow;
-    ExternalReference stack_limit =
-        ExternalReference::address_of_regexp_stack_limit();
-    __ load_rax(stack_limit);
-    __ cmpq(backtrack_stackpointer(), rax);
-    __ j(above, &no_stack_overflow);
+  Label no_stack_overflow;
+  ExternalReference stack_limit =
+      ExternalReference::address_of_regexp_stack_limit();
+  __ load_rax(stack_limit);
+  __ cmpq(backtrack_stackpointer(), rax);
+  __ j(above, &no_stack_overflow);
 
-    SafeCall(&stack_overflow_label_);
+  SafeCall(&stack_overflow_label_);
 
-    __ bind(&no_stack_overflow);
-  }
+  __ bind(&no_stack_overflow);
 }