Introduce DontEmitDebugCodeScope to fix the x64 nosnapshot build.
author    haitao.feng@intel.com <haitao.feng@intel.com@ce2b1a6d-e550-0410-aec6-3dcde31c8c00>
          Wed, 16 Apr 2014 02:06:14 +0000 (02:06 +0000)
committer haitao.feng@intel.com <haitao.feng@intel.com@ce2b1a6d-e550-0410-aec6-3dcde31c8c00>
          Wed, 16 Apr 2014 02:06:14 +0000 (02:06 +0000)
R=yangguo@chromium.org

Review URL: https://codereview.chromium.org/232673003

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@20784 ce2b1a6d-e550-0410-aec6-3dcde31c8c00

src/assembler.h
src/x64/full-codegen-x64.cc

diff --git a/src/assembler.h b/src/assembler.h
index 0349b065821b57f1ad282ec0f58959261e6e575e..af5a2a5bfc82a9a33d2d63ce81f357d6ac63bc2b 100644
@@ -107,6 +107,22 @@ class AssemblerBase: public Malloced {
 };
 
 
+// Avoids emitting debug code during the lifetime of this scope object.
+class DontEmitDebugCodeScope BASE_EMBEDDED {
+ public:
+  explicit DontEmitDebugCodeScope(AssemblerBase* assembler)
+      : assembler_(assembler), old_value_(assembler->emit_debug_code()) {
+    assembler_->set_emit_debug_code(false);
+  }
+  ~DontEmitDebugCodeScope() {
+    assembler_->set_emit_debug_code(old_value_);
+  }
+ private:
+  AssemblerBase* assembler_;
+  bool old_value_;
+};
+
+
 // Avoids using instructions that vary in size in unpredictable ways between the
 // snapshot and the running VM.
 class PredictableCodeSizeScope {
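
For illustration only, a minimal usage sketch of the new scope (the function and the masm parameter are placeholders, not part of this change):

    #include "assembler.h"  // declares AssemblerBase and DontEmitDebugCodeScope

    // Emit a sequence whose byte length must not depend on --debug-code:
    // the scope clears emit_debug_code() on entry and the destructor
    // restores the previous value on exit.
    void EmitSizeCriticalSequence(v8::internal::AssemblerBase* masm) {
      v8::internal::DontEmitDebugCodeScope dont_emit_debug_code(masm);
      // ... emit instructions whose size must be predictable ...
    }  // previous emit_debug_code() value restored here
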
diff --git a/src/x64/full-codegen-x64.cc b/src/x64/full-codegen-x64.cc
index 4ebef21c1ca6adcce54eace79178ae2b500002e0..c39ca97ac7f34ccd643bbd601c96a5a4725588e0 100644
@@ -363,6 +363,9 @@ void FullCodeGenerator::EmitProfilingCounterReset() {
 }
 
 
+static const byte kJnsOffset = kPointerSize == kInt64Size ? 0x1d : 0x14;
+
+
 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                 Label* back_edge_target) {
   Comment cmnt(masm_, "[ Back edge bookkeeping");
@@ -373,17 +376,22 @@ void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
   int weight = Min(kMaxBackEdgeWeight,
                    Max(1, distance / kCodeSizeMultiplier));
   EmitProfilingCounterDecrement(weight);
-  __ j(positive, &ok, Label::kNear);
-  __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
 
-  // Record a mapping of this PC offset to the OSR id.  This is used to find
-  // the AST id from the unoptimized code in order to use it as a key into
-  // the deoptimization input data found in the optimized code.
-  RecordBackEdge(stmt->OsrEntryId());
+  __ j(positive, &ok, Label::kNear);
+  {
+    PredictableCodeSizeScope predictable_code_size_scope(masm_, kJnsOffset);
+    DontEmitDebugCodeScope dont_emit_debug_code_scope(masm_);
+    __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
 
-  EmitProfilingCounterReset();
+    // Record a mapping of this PC offset to the OSR id.  This is used to find
+    // the AST id from the unoptimized code in order to use it as a key into
+    // the deoptimization input data found in the optimized code.
+    RecordBackEdge(stmt->OsrEntryId());
 
+    EmitProfilingCounterReset();
+  }
   __ bind(&ok);
+
   PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
   // Record a mapping of the OSR id to this PC.  This is used if the OSR
   // entry becomes the target of a bailout.  We don't expect it to be, but
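
Why both scopes are needed here: the back-edge patcher finds the jns and the interrupt call at a fixed distance from each other (kJnsOffset above: 0x1d when kPointerSize == kInt64Size, 0x14 otherwise), so any debug code or size-variant instruction emitted between them would shift the bytes it inspects. A standalone sketch of that kind of check follows; it is not V8's actual patcher code, and the byte layout is assumed for illustration:

    #include <cstdint>

    typedef uint8_t byte;
    static const byte kJnsInstruction = 0x79;    // short jns opcode (see below)
    static const byte kJnsOffsetAssumed = 0x1d;  // assumed 64-bit layout

    // Returns true if the bytes immediately before the call's 32-bit target
    // still look like the unpatched "jns <kJnsOffset>" emitted above.
    bool LooksLikeUnpatchedBackEdge(const byte* call_target_address) {
      return call_target_address[-3] == kJnsInstruction &&
             call_target_address[-2] == kJnsOffsetAssumed;
    }
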
@@ -4844,7 +4852,6 @@ FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
 
 
 static const byte kJnsInstruction = 0x79;
-static const byte kJnsOffset = kPointerSize == kInt64Size ? 0x1d : 0x14;
 static const byte kNopByteOne = 0x66;
 static const byte kNopByteTwo = 0x90;
 #ifdef DEBUG