ARM: make predictable code size scope more precise in DoDeferredInstanceOfKnownGlobal.
author: ulan <ulan@chromium.org>
Fri, 19 Jun 2015 04:54:43 +0000 (21:54 -0700)
committer: Commit bot <commit-bot@chromium.org>
Fri, 19 Jun 2015 04:54:51 +0000 (04:54 +0000)
We block constant pool emission up to the call stub instruction, but the predictable
code size check includes the next instruction after the call instruction. That
instruction can emit a constant pool entry.

BUG=chromium:500831
LOG=NO
TEST=mjsunit/regress/regress-500831

Review URL: https://codereview.chromium.org/1189123003

Cr-Commit-Position: refs/heads/master@{#29132}

src/arm/lithium-codegen-arm.cc
test/mjsunit/regress/regress-500831.js [new file with mode: 0644]

index a5639f8..c1edac7 100644 (file)
@@ -2826,37 +2826,41 @@ void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
 
   int call_size = CallCodeSize(stub.GetCode(), RelocInfo::CODE_TARGET);
   int additional_delta = (call_size / Assembler::kInstrSize) + 4;
-  // Make sure that code size is predicable, since we use specific constants
-  // offsets in the code to find embedded values..
-  PredictableCodeSizeScope predictable(
-      masm_, (additional_delta + 1) * Assembler::kInstrSize);
-  // Make sure we don't emit any additional entries in the constant pool before
-  // the call to ensure that the CallCodeSize() calculated the correct number of
-  // instructions for the constant pool load.
   {
-    ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
-    int map_check_delta =
-        masm_->InstructionsGeneratedSince(map_check) + additional_delta;
-    int bool_load_delta =
-        masm_->InstructionsGeneratedSince(bool_load) + additional_delta;
-    Label before_push_delta;
-    __ bind(&before_push_delta);
-    __ BlockConstPoolFor(additional_delta);
-    // r5 is used to communicate the offset to the location of the map check.
-    __ mov(r5, Operand(map_check_delta * kPointerSize));
-    // r6 is used to communicate the offset to the location of the bool load.
-    __ mov(r6, Operand(bool_load_delta * kPointerSize));
-    // The mov above can generate one or two instructions. The delta was
-    // computed for two instructions, so we need to pad here in case of one
-    // instruction.
-    while (masm_->InstructionsGeneratedSince(&before_push_delta) != 4) {
-      __ nop();
+    // Make sure that code size is predictable, since we use specific constant
+    // offsets in the code to find embedded values.
+    PredictableCodeSizeScope predictable(
+        masm_, additional_delta * Assembler::kInstrSize);
+    // The labels must be already bound since the code has predictable size up
+    // to the call instruction.
+    DCHECK(map_check->is_bound());
+    DCHECK(bool_load->is_bound());
+    // Make sure we don't emit any additional entries in the constant pool
+    // before the call to ensure that the CallCodeSize() calculated the
+    // correct number of instructions for the constant pool load.
+    {
+      ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
+      int map_check_delta =
+          masm_->InstructionsGeneratedSince(map_check) + additional_delta;
+      int bool_load_delta =
+          masm_->InstructionsGeneratedSince(bool_load) + additional_delta;
+      Label before_push_delta;
+      __ bind(&before_push_delta);
+      __ BlockConstPoolFor(additional_delta);
+      // r5 is used to communicate the offset to the location of the map check.
+      __ mov(r5, Operand(map_check_delta * kPointerSize));
+      // r6 is used to communicate the offset to the location of the bool load.
+      __ mov(r6, Operand(bool_load_delta * kPointerSize));
+      // The mov above can generate one or two instructions. The delta was
+      // computed for two instructions, so we need to pad here in case of one
+      // instruction.
+      while (masm_->InstructionsGeneratedSince(&before_push_delta) != 4) {
+        __ nop();
+      }
     }
+    CallCodeGeneric(stub.GetCode(), RelocInfo::CODE_TARGET, instr,
+                    RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
   }
-  CallCodeGeneric(stub.GetCode(),
-                  RelocInfo::CODE_TARGET,
-                  instr,
-                  RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
   LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment();
   safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
   // Put the result value (r0) into the result register slot and
diff --git a/test/mjsunit/regress/regress-500831.js b/test/mjsunit/regress/regress-500831.js
new file mode 100644 (file)
index 0000000..6d8cfaf
--- /dev/null
@@ -0,0 +1,94 @@
+// Copyright 2015 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Flags:  --allow-natives-syntax
+
+// To reproduce reliably use: --random-seed=-2012454635 --nodebug-code
+
+function deepEquals(a, b) {
+  if (a === b) {;
+    return true;
+  }
+  if (typeof a != typeof b) return false;
+  if (typeof a == "number");
+  if (typeof a !== "object" && typeof a !== "function")
+    return false;
+  var objectClass = classOf();
+  if (b) return false;
+  if (objectClass === "RegExp") {;
+  }
+  if (objectClass === "Function") return false;
+  if (objectClass === "Array") {
+    var elementCount = 0;
+    if (a.length != b.length) {
+      return false;
+    }
+    for (var i = 0; i < a.length; i++) {
+      if (a[i][i]) return false;
+    }
+    return true;
+  }
+  if (objectClass == "String" || objectClass == "Number" ||
+      objectClass == "Boolean" || objectClass == "Date") {
+    if (a.valueOf()) return false;
+  };
+}
+function equals(expected, found, name_opt) {
+  if (!deepEquals(found, expected)) {}
+};
+function instof(obj, type) {
+  if (!(obj instanceof type)) {
+    var actualTypeName = null;
+    var actualConstructor = Object.getPrototypeOf().constructor;
+    if (typeof actualConstructor == "function") {;
+    };
+  }
+};
+var __v_0 = 1;
+var __v_6 = {};
+var __v_9 = {};
+
+function __f_4() {
+  return function() {};
+}
+__v_6 = new Uint8ClampedArray(10);
+
+function __f_6() {
+  __v_6[0] = 0.499;
+  instof(__f_4(), Function);
+  equals();
+  __v_6[0] = 0.5;
+  equals();
+  __v_0[0] = 0.501;
+  equals(__v_6[4294967295]);
+  __v_6[0] = 1.499;
+  equals();
+  __v_6[0] = 1.5;
+  equals();
+  __v_6[0] = 1.501;
+  equals();
+  __v_6[0] = 2.5;
+  equals(__v_6[-1073741824]);
+  __v_6[0] = 3.5;
+  equals();
+  __v_6[0] = 252.5;
+  equals();
+  __v_6[0] = 253.5;
+  equals();
+  __v_6[0] = 254.5;
+  equals();
+  __v_6[0] = 256.5;
+  equals();
+  __v_6[0] = -0.5;
+  equals(__v_6[8]);
+  __v_6[0] = -1.5;
+  equals();
+  __v_6[0] = 1000000000000;
+  equals();
+  __v_9[0] = -1000000000000;
+  equals(__v_6[0]);
+}
+__f_6();
+__f_6();
+%OptimizeFunctionOnNextCall(__f_6);
+__f_6();