From: haitao.feng@intel.com
Date: Wed, 16 Apr 2014 02:06:14 +0000 (+0000)
Subject: Introduce DontEmitDebugCodeScope to fix the x64 nosnapshot build.
X-Git-Tag: upstream/4.7.83~9587
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=7ffbbbef3c44bc835c17f8346ce35b040ae0ff62;p=platform%2Fupstream%2Fv8.git

Introduce DontEmitDebugCodeScope to fix the x64 nosnapshot build.

R=yangguo@chromium.org

Review URL: https://codereview.chromium.org/232673003

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@20784 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
---

diff --git a/src/assembler.h b/src/assembler.h
index 0349b0658..af5a2a5bf 100644
--- a/src/assembler.h
+++ b/src/assembler.h
@@ -107,6 +107,22 @@ class AssemblerBase: public Malloced {
 };
 
 
+// Avoids emitting debug code during the lifetime of this scope object.
+class DontEmitDebugCodeScope BASE_EMBEDDED {
+ public:
+  explicit DontEmitDebugCodeScope(AssemblerBase* assembler)
+      : assembler_(assembler), old_value_(assembler->emit_debug_code()) {
+    assembler_->set_emit_debug_code(false);
+  }
+  ~DontEmitDebugCodeScope() {
+    assembler_->set_emit_debug_code(old_value_);
+  };
+ private:
+  AssemblerBase* assembler_;
+  bool old_value_;
+};
+
+
 // Avoids using instructions that vary in size in unpredictable ways between the
 // snapshot and the running VM.
 class PredictableCodeSizeScope {
diff --git a/src/x64/full-codegen-x64.cc b/src/x64/full-codegen-x64.cc
index 4ebef21c1..c39ca97ac 100644
--- a/src/x64/full-codegen-x64.cc
+++ b/src/x64/full-codegen-x64.cc
@@ -363,6 +363,9 @@ void FullCodeGenerator::EmitProfilingCounterReset() {
 }
 
 
+static const byte kJnsOffset = kPointerSize == kInt64Size ? 0x1d : 0x14;
+
+
 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                 Label* back_edge_target) {
   Comment cmnt(masm_, "[ Back edge bookkeeping");
@@ -373,17 +376,22 @@ void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
   int weight = Min(kMaxBackEdgeWeight,
                    Max(1, distance / kCodeSizeMultiplier));
   EmitProfilingCounterDecrement(weight);
-  __ j(positive, &ok, Label::kNear);
-  __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
 
-  // Record a mapping of this PC offset to the OSR id. This is used to find
-  // the AST id from the unoptimized code in order to use it as a key into
-  // the deoptimization input data found in the optimized code.
-  RecordBackEdge(stmt->OsrEntryId());
+  __ j(positive, &ok, Label::kNear);
+  {
+    PredictableCodeSizeScope predictible_code_size_scope(masm_, kJnsOffset);
+    DontEmitDebugCodeScope dont_emit_debug_code_scope(masm_);
+    __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
 
-  EmitProfilingCounterReset();
+    // Record a mapping of this PC offset to the OSR id. This is used to find
+    // the AST id from the unoptimized code in order to use it as a key into
+    // the deoptimization input data found in the optimized code.
+    RecordBackEdge(stmt->OsrEntryId());
 
+    EmitProfilingCounterReset();
+  }
   __ bind(&ok);
+
   PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
   // Record a mapping of the OSR id to this PC. This is used if the OSR
   // entry becomes the target of a bailout. We don't expect it to be, but
@@ -4844,7 +4852,6 @@ FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
 
 
 static const byte kJnsInstruction = 0x79;
-static const byte kJnsOffset = kPointerSize == kInt64Size ? 0x1d : 0x14;
 static const byte kNopByteOne = 0x66;
 static const byte kNopByteTwo = 0x90;
 #ifdef DEBUG
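
For readers unfamiliar with the pattern this change relies on, the standalone sketch below mirrors what the new DontEmitDebugCodeScope does: it is a RAII guard that saves the assembler's emit_debug_code flag, clears it for the lifetime of the scope, and restores it in the destructor, so the guarded interrupt-check sequence has the same size with and without debug code (which is what the surrounding PredictableCodeSizeScope asserts). This is only an illustrative sketch, not code from the V8 tree; the FakeAssembler class and the no_debug_code variable name are stand-ins invented for the example.

#include <cassert>
#include <iostream>

// Stand-in for v8's AssemblerBase: only the emit_debug_code flag matters here.
class FakeAssembler {
 public:
  bool emit_debug_code() const { return emit_debug_code_; }
  void set_emit_debug_code(bool value) { emit_debug_code_ = value; }
 private:
  bool emit_debug_code_ = true;
};

// RAII guard mirroring DontEmitDebugCodeScope from the diff above: debug code
// is suppressed for the lifetime of the scope and the previous setting is
// restored when the scope is destroyed.
class DontEmitDebugCodeScope {
 public:
  explicit DontEmitDebugCodeScope(FakeAssembler* assembler)
      : assembler_(assembler), old_value_(assembler->emit_debug_code()) {
    assembler_->set_emit_debug_code(false);
  }
  ~DontEmitDebugCodeScope() { assembler_->set_emit_debug_code(old_value_); }
 private:
  FakeAssembler* assembler_;
  bool old_value_;
};

int main() {
  FakeAssembler masm;
  assert(masm.emit_debug_code());
  {
    DontEmitDebugCodeScope no_debug_code(&masm);
    // Code emitted here would skip debug checks, keeping its size fixed
    // regardless of build flags.
    assert(!masm.emit_debug_code());
  }
  // The original setting is restored once the scope ends.
  assert(masm.emit_debug_code());
  std::cout << "emit_debug_code restored: " << masm.emit_debug_code() << "\n";
  return 0;
}

Using a scope object rather than a pair of set_emit_debug_code(false)/set_emit_debug_code(true) calls keeps the flag balanced on every exit path, which is the same reason the real class is used around the back-edge interrupt check.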