From: whesse@chromium.org Date: Tue, 27 Oct 2009 14:56:50 +0000 (+0000) Subject: Remove --check-stack flag from V8. X-Git-Tag: upstream/4.7.83~23069 X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=29914b6be7eb78e79c142eddd757e3c1bb034be9;p=platform%2Fupstream%2Fv8.git Remove --check-stack flag from V8. Review URL: http://codereview.chromium.org/338017 git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@3149 ce2b1a6d-e550-0410-aec6-3dcde31c8c00 --- diff --git a/src/arm/codegen-arm.cc b/src/arm/codegen-arm.cc index 47f0e96..dd88515 100644 --- a/src/arm/codegen-arm.cc +++ b/src/arm/codegen-arm.cc @@ -1122,22 +1122,20 @@ void CodeGenerator::Branch(bool if_true, JumpTarget* target) { void CodeGenerator::CheckStack() { VirtualFrame::SpilledScope spilled_scope; - if (FLAG_check_stack) { - Comment cmnt(masm_, "[ check stack"); - __ LoadRoot(ip, Heap::kStackLimitRootIndex); - // Put the lr setup instruction in the delay slot. kInstrSize is added to - // the implicit 8 byte offset that always applies to operations with pc and - // gives a return address 12 bytes down. - masm_->add(lr, pc, Operand(Assembler::kInstrSize)); - masm_->cmp(sp, Operand(ip)); - StackCheckStub stub; - // Call the stub if lower. - masm_->mov(pc, - Operand(reinterpret_cast<intptr_t>(stub.GetCode().location()), - RelocInfo::CODE_TARGET), - LeaveCC, - lo); - } + Comment cmnt(masm_, "[ check stack"); + __ LoadRoot(ip, Heap::kStackLimitRootIndex); + // Put the lr setup instruction in the delay slot. kInstrSize is added to + // the implicit 8 byte offset that always applies to operations with pc and + // gives a return address 12 bytes down. + masm_->add(lr, pc, Operand(Assembler::kInstrSize)); + masm_->cmp(sp, Operand(ip)); + StackCheckStub stub; + // Call the stub if lower.
+ masm_->mov(pc, + Operand(reinterpret_cast<intptr_t>(stub.GetCode().location()), + RelocInfo::CODE_TARGET), + LeaveCC, + lo); } diff --git a/src/arm/fast-codegen-arm.cc b/src/arm/fast-codegen-arm.cc index 757a03f..bb5edc6 100644 --- a/src/arm/fast-codegen-arm.cc +++ b/src/arm/fast-codegen-arm.cc @@ -63,28 +63,25 @@ void FastCodeGenerator::Generate(FunctionLiteral* fun) { if (locals_count > 0) { __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); } - if (FLAG_check_stack) { - __ LoadRoot(r2, Heap::kStackLimitRootIndex); - } + __ LoadRoot(r2, Heap::kStackLimitRootIndex); for (int i = 0; i < locals_count; i++) { __ push(ip); } } - if (FLAG_check_stack) { - // Put the lr setup instruction in the delay slot. The kInstrSize is - // added to the implicit 8 byte offset that always applies to operations - // with pc and gives a return address 12 bytes down. - Comment cmnt(masm_, "[ Stack check"); - __ add(lr, pc, Operand(Assembler::kInstrSize)); - __ cmp(sp, Operand(r2)); - StackCheckStub stub; - __ mov(pc, - Operand(reinterpret_cast<intptr_t>(stub.GetCode().location()), - RelocInfo::CODE_TARGET), - LeaveCC, - lo); - } + // Check the stack for overflow or break request. + // Put the lr setup instruction in the delay slot. The kInstrSize is + // added to the implicit 8 byte offset that always applies to operations + // with pc and gives a return address 12 bytes down.
+ Comment cmnt(masm_, "[ Stack check"); + __ add(lr, pc, Operand(Assembler::kInstrSize)); + __ cmp(sp, Operand(r2)); + StackCheckStub stub; + __ mov(pc, + Operand(reinterpret_cast<intptr_t>(stub.GetCode().location()), + RelocInfo::CODE_TARGET), + LeaveCC, + lo); { Comment cmnt(masm_, "[ Declarations"); VisitDeclarations(fun->scope()->declarations()); diff --git a/src/arm/regexp-macro-assembler-arm.cc b/src/arm/regexp-macro-assembler-arm.cc index addc07d..bd50428 100644 --- a/src/arm/regexp-macro-assembler-arm.cc +++ b/src/arm/regexp-macro-assembler-arm.cc @@ -1100,14 +1100,12 @@ void RegExpMacroAssemblerARM::CheckPreemption() { void RegExpMacroAssemblerARM::CheckStackLimit() { - if (FLAG_check_stack) { - ExternalReference stack_limit = - ExternalReference::address_of_regexp_stack_limit(); - __ mov(r0, Operand(stack_limit)); - __ ldr(r0, MemOperand(r0)); - __ cmp(backtrack_stackpointer(), Operand(r0)); - SafeCall(&stack_overflow_label_, ls); - } + ExternalReference stack_limit = + ExternalReference::address_of_regexp_stack_limit(); + __ mov(r0, Operand(stack_limit)); + __ ldr(r0, MemOperand(r0)); + __ cmp(backtrack_stackpointer(), Operand(r0)); + SafeCall(&stack_overflow_label_, ls); } diff --git a/src/arm/virtual-frame-arm.cc b/src/arm/virtual-frame-arm.cc index 97d164e..47ecb96 100644 --- a/src/arm/virtual-frame-arm.cc +++ b/src/arm/virtual-frame-arm.cc @@ -146,29 +146,27 @@ void VirtualFrame::AllocateStackSlots() { // Initialize stack slots with 'undefined' value. __ LoadRoot(ip, Heap::kUndefinedValueRootIndex); } - if (FLAG_check_stack) { - __ LoadRoot(r2, Heap::kStackLimitRootIndex); - } + __ LoadRoot(r2, Heap::kStackLimitRootIndex); for (int i = 0; i < count; i++) { __ push(ip); } - if (FLAG_check_stack) { - // Put the lr setup instruction in the delay slot. The kInstrSize is added - // to the implicit 8 byte offset that always applies to operations with pc - // and gives a return address 12 bytes down.
- masm()->add(lr, pc, Operand(Assembler::kInstrSize)); - masm()->cmp(sp, Operand(r2)); - StackCheckStub stub; - // Call the stub if lower. - masm()->mov(pc, - Operand(reinterpret_cast<intptr_t>(stub.GetCode().location()), - RelocInfo::CODE_TARGET), - LeaveCC, - lo); - } + // Check the stack for overflow or a break request. + // Put the lr setup instruction in the delay slot. The kInstrSize is added + // to the implicit 8 byte offset that always applies to operations with pc + // and gives a return address 12 bytes down. + masm()->add(lr, pc, Operand(Assembler::kInstrSize)); + masm()->cmp(sp, Operand(r2)); + StackCheckStub stub; + // Call the stub if lower. + masm()->mov(pc, + Operand(reinterpret_cast<intptr_t>(stub.GetCode().location()), + RelocInfo::CODE_TARGET), + LeaveCC, + lo); } + void VirtualFrame::SaveContextRegister() { UNIMPLEMENTED(); } diff --git a/src/flag-definitions.h b/src/flag-definitions.h index 42c96b6..1ceb672 100644 --- a/src/flag-definitions.h +++ b/src/flag-definitions.h @@ -132,8 +132,6 @@ DEFINE_bool(stack_trace_on_abort, true, // codegen-ia32.cc / codegen-arm.cc DEFINE_bool(trace, false, "trace function calls") DEFINE_bool(defer_negation, true, "defer negation operation") -DEFINE_bool(check_stack, true, - "check stack for overflow, interrupt, breakpoint") // codegen.cc DEFINE_bool(lazy, true, "use lazy compilation") diff --git a/src/ia32/builtins-ia32.cc b/src/ia32/builtins-ia32.cc index ad44026..4381b22 100644 --- a/src/ia32/builtins-ia32.cc +++ b/src/ia32/builtins-ia32.cc @@ -520,48 +520,48 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) { __ push(Operand(ebp, 2 * kPointerSize)); // push arguments __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION); - if (FLAG_check_stack) { - // We need to catch preemptions right here, otherwise an unlucky preemption - // could show up as a failed apply.
- ExternalReference stack_guard_limit = - ExternalReference::address_of_stack_guard_limit(); - Label retry_preemption; - Label no_preemption; - __ bind(&retry_preemption); - __ mov(edi, Operand::StaticVariable(stack_guard_limit)); - __ cmp(esp, Operand(edi)); - __ j(above, &no_preemption, taken); - - // Preemption! - // Because builtins always remove the receiver from the stack, we - // have to fake one to avoid underflowing the stack. - __ push(eax); - __ push(Immediate(Smi::FromInt(0))); + // Check the stack for overflow or a break request. + // We need to catch preemptions right here, otherwise an unlucky preemption + // could show up as a failed apply. + ExternalReference stack_guard_limit = + ExternalReference::address_of_stack_guard_limit(); + Label retry_preemption; + Label no_preemption; + __ bind(&retry_preemption); + __ mov(edi, Operand::StaticVariable(stack_guard_limit)); + __ cmp(esp, Operand(edi)); + __ j(above, &no_preemption, taken); + + // Preemption! + // Because builtins always remove the receiver from the stack, we + // have to fake one to avoid underflowing the stack. + __ push(eax); + __ push(Immediate(Smi::FromInt(0))); - // Do call to runtime routine. - __ CallRuntime(Runtime::kStackGuard, 1); - __ pop(eax); - __ jmp(&retry_preemption); - - __ bind(&no_preemption); - - Label okay; - // Make ecx the space we have left. - __ mov(ecx, Operand(esp)); - __ sub(ecx, Operand(edi)); - // Make edx the space we need for the array when it is unrolled onto the - // stack. - __ mov(edx, Operand(eax)); - __ shl(edx, kPointerSizeLog2 - kSmiTagSize); - __ cmp(ecx, Operand(edx)); - __ j(greater, &okay, taken); - - // Too bad: Out of stack space. - __ push(Operand(ebp, 4 * kPointerSize)); // push this - __ push(eax); - __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION); - __ bind(&okay); - } + // Do call to runtime routine. 
+ __ CallRuntime(Runtime::kStackGuard, 1); + __ pop(eax); + __ jmp(&retry_preemption); + + __ bind(&no_preemption); + + Label okay; + // Make ecx the space we have left. + __ mov(ecx, Operand(esp)); + __ sub(ecx, Operand(edi)); + // Make edx the space we need for the array when it is unrolled onto the + // stack. + __ mov(edx, Operand(eax)); + __ shl(edx, kPointerSizeLog2 - kSmiTagSize); + __ cmp(ecx, Operand(edx)); + __ j(greater, &okay, taken); + + // Too bad: Out of stack space. + __ push(Operand(ebp, 4 * kPointerSize)); // push this + __ push(eax); + __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION); + __ bind(&okay); + // End of stack check. // Push current index and limit. const int kLimitOffset = diff --git a/src/ia32/codegen-ia32.cc b/src/ia32/codegen-ia32.cc index 003876b..4ac5527 100644 --- a/src/ia32/codegen-ia32.cc +++ b/src/ia32/codegen-ia32.cc @@ -2203,14 +2203,12 @@ void DeferredStackCheck::Generate() { void CodeGenerator::CheckStack() { - if (FLAG_check_stack) { - DeferredStackCheck* deferred = new DeferredStackCheck; - ExternalReference stack_guard_limit = - ExternalReference::address_of_stack_guard_limit(); - __ cmp(esp, Operand::StaticVariable(stack_guard_limit)); - deferred->Branch(below); - deferred->BindExit(); - } + DeferredStackCheck* deferred = new DeferredStackCheck; + ExternalReference stack_guard_limit = + ExternalReference::address_of_stack_guard_limit(); + __ cmp(esp, Operand::StaticVariable(stack_guard_limit)); + deferred->Branch(below); + deferred->BindExit(); } diff --git a/src/ia32/regexp-macro-assembler-ia32.cc b/src/ia32/regexp-macro-assembler-ia32.cc index b878550..76d36a9 100644 --- a/src/ia32/regexp-macro-assembler-ia32.cc +++ b/src/ia32/regexp-macro-assembler-ia32.cc @@ -1093,17 +1093,15 @@ void RegExpMacroAssemblerIA32::CheckPreemption() { void RegExpMacroAssemblerIA32::CheckStackLimit() { - if (FLAG_check_stack) { - Label no_stack_overflow; - ExternalReference stack_limit = - 
ExternalReference::address_of_regexp_stack_limit(); - __ cmp(backtrack_stackpointer(), Operand::StaticVariable(stack_limit)); - __ j(above, &no_stack_overflow); + Label no_stack_overflow; + ExternalReference stack_limit = + ExternalReference::address_of_regexp_stack_limit(); + __ cmp(backtrack_stackpointer(), Operand::StaticVariable(stack_limit)); + __ j(above, &no_stack_overflow); - SafeCall(&stack_overflow_label_); + SafeCall(&stack_overflow_label_); - __ bind(&no_stack_overflow); - } + __ bind(&no_stack_overflow); } diff --git a/src/x64/builtins-x64.cc b/src/x64/builtins-x64.cc index 01992ce..ea6c379 100644 --- a/src/x64/builtins-x64.cc +++ b/src/x64/builtins-x64.cc @@ -318,47 +318,47 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) { __ push(Operand(rbp, kArgumentsOffset)); __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION); - if (FLAG_check_stack) { - // We need to catch preemptions right here, otherwise an unlucky preemption - // could show up as a failed apply. - Label retry_preemption; - Label no_preemption; - __ bind(&retry_preemption); - ExternalReference stack_guard_limit = - ExternalReference::address_of_stack_guard_limit(); - __ movq(kScratchRegister, stack_guard_limit); - __ movq(rcx, rsp); - __ subq(rcx, Operand(kScratchRegister, 0)); - // rcx contains the difference between the stack limit and the stack top. - // We use it below to check that there is enough room for the arguments. - __ j(above, &no_preemption); - - // Preemption! - // Because runtime functions always remove the receiver from the stack, we - // have to fake one to avoid underflowing the stack. - __ push(rax); - __ Push(Smi::FromInt(0)); + // Check the stack for overflow or a break request. + // We need to catch preemptions right here, otherwise an unlucky preemption + // could show up as a failed apply. 
+ Label retry_preemption; + Label no_preemption; + __ bind(&retry_preemption); + ExternalReference stack_guard_limit = + ExternalReference::address_of_stack_guard_limit(); + __ movq(kScratchRegister, stack_guard_limit); + __ movq(rcx, rsp); + __ subq(rcx, Operand(kScratchRegister, 0)); + // rcx contains the difference between the stack limit and the stack top. + // We use it below to check that there is enough room for the arguments. + __ j(above, &no_preemption); + + // Preemption! + // Because runtime functions always remove the receiver from the stack, we + // have to fake one to avoid underflowing the stack. + __ push(rax); + __ Push(Smi::FromInt(0)); - // Do call to runtime routine. - __ CallRuntime(Runtime::kStackGuard, 1); - __ pop(rax); - __ jmp(&retry_preemption); + // Do call to runtime routine. + __ CallRuntime(Runtime::kStackGuard, 1); + __ pop(rax); + __ jmp(&retry_preemption); - __ bind(&no_preemption); + __ bind(&no_preemption); - Label okay; - // Make rdx the space we need for the array when it is unrolled onto the - // stack. - __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2); - __ cmpq(rcx, rdx); - __ j(greater, &okay); + Label okay; + // Make rdx the space we need for the array when it is unrolled onto the + // stack. + __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2); + __ cmpq(rcx, rdx); + __ j(greater, &okay); - // Too bad: Out of stack space. - __ push(Operand(rbp, kFunctionOffset)); - __ push(rax); - __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION); - __ bind(&okay); - } + // Too bad: Out of stack space. + __ push(Operand(rbp, kFunctionOffset)); + __ push(rax); + __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION); + __ bind(&okay); + // End of stack check. // Push current index and limit. 
const int kLimitOffset = diff --git a/src/x64/codegen-x64.cc b/src/x64/codegen-x64.cc index 33c06f7..95f30d8 100644 --- a/src/x64/codegen-x64.cc +++ b/src/x64/codegen-x64.cc @@ -852,12 +852,10 @@ void DeferredStackCheck::Generate() { void CodeGenerator::CheckStack() { - if (FLAG_check_stack) { - DeferredStackCheck* deferred = new DeferredStackCheck; - __ CompareRoot(rsp, Heap::kStackLimitRootIndex); - deferred->Branch(below); - deferred->BindExit(); - } + DeferredStackCheck* deferred = new DeferredStackCheck; + __ CompareRoot(rsp, Heap::kStackLimitRootIndex); + deferred->Branch(below); + deferred->BindExit(); } diff --git a/src/x64/regexp-macro-assembler-x64.cc b/src/x64/regexp-macro-assembler-x64.cc index d8dcc37..88636f8 100644 --- a/src/x64/regexp-macro-assembler-x64.cc +++ b/src/x64/regexp-macro-assembler-x64.cc @@ -1209,18 +1209,16 @@ void RegExpMacroAssemblerX64::CheckPreemption() { void RegExpMacroAssemblerX64::CheckStackLimit() { - if (FLAG_check_stack) { - Label no_stack_overflow; - ExternalReference stack_limit = - ExternalReference::address_of_regexp_stack_limit(); - __ load_rax(stack_limit); - __ cmpq(backtrack_stackpointer(), rax); - __ j(above, &no_stack_overflow); + Label no_stack_overflow; + ExternalReference stack_limit = + ExternalReference::address_of_regexp_stack_limit(); + __ load_rax(stack_limit); + __ cmpq(backtrack_stackpointer(), rax); + __ j(above, &no_stack_overflow); - SafeCall(&stack_overflow_label_); + SafeCall(&stack_overflow_label_); - __ bind(&no_stack_overflow); - } + __ bind(&no_stack_overflow); }