Drop IsPregenerated() and allow_stub_calls flag.
author    bmeurer@chromium.org <bmeurer@chromium.org@ce2b1a6d-e550-0410-aec6-3dcde31c8c00>
Fri, 29 Nov 2013 20:49:15 +0000 (20:49 +0000)
committer bmeurer@chromium.org <bmeurer@chromium.org@ce2b1a6d-e550-0410-aec6-3dcde31c8c00>
Fri, 29 Nov 2013 20:49:15 +0000 (20:49 +0000)
This also removes the fixed register hack that was required to support RecordWriteStub in the snapshot.

R=mstarzinger@chromium.org

Review URL: https://codereview.chromium.org/96853003

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@18167 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
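
For orientation, the core simplification is the same on all three ports (arm, ia32, x64): the stub-call guard no longer consults an allow_stub_calls_ flag or CompilingCallsToThisStubIsGCSafe(); it only rejects frame-building stubs when no frame has been set up. A condensed sketch of the post-patch MacroAssembler code, taken from the hunks below (illustrative excerpt only; per-port details such as the Condition argument on arm and jmp instead of Jump on ia32 are elided):

    bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
      // Calling a stub that may set up its own frame is only safe when the
      // caller already has a frame; the allow_stub_calls_ escape hatch is gone.
      return has_frame_ || !stub->SometimesSetsUpAFrame();
    }

    void MacroAssembler::TailCallStub(CodeStub* stub) {
      // The old ASSERT against allow_stub_calls_ / pregenerated stubs is
      // dropped; a tail call simply jumps to the stub's code object.
      Jump(stub->GetCode(isolate()), RelocInfo::CODE_TARGET);
    }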

21 files changed:
src/arm/code-stubs-arm.cc
src/arm/code-stubs-arm.h
src/arm/macro-assembler-arm.cc
src/arm/macro-assembler-arm.h
src/code-stubs-hydrogen.cc
src/code-stubs.cc
src/code-stubs.h
src/ia32/code-stubs-ia32.cc
src/ia32/code-stubs-ia32.h
src/ia32/macro-assembler-ia32.cc
src/ia32/macro-assembler-ia32.h
src/x64/code-stubs-x64.cc
src/x64/code-stubs-x64.h
src/x64/macro-assembler-x64.cc
src/x64/macro-assembler-x64.h
test/cctest/test-code-stubs-arm.cc
test/cctest/test-code-stubs-ia32.cc
test/cctest/test-code-stubs-x64.cc
test/cctest/test-macro-assembler-arm.cc
test/cctest/test-macro-assembler-ia32.cc
test/cctest/test-macro-assembler-x64.cc

diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc
index 09eca72..7afede3 100644
@@ -693,21 +693,6 @@ void DoubleToIStub::Generate(MacroAssembler* masm) {
 }
 
 
-bool WriteInt32ToHeapNumberStub::IsPregenerated(Isolate* isolate) {
-  // These variants are compiled ahead of time.  See next method.
-  if (the_int_.is(r1) && the_heap_number_.is(r0) && scratch_.is(r2)) {
-    return true;
-  }
-  if (the_int_.is(r2) && the_heap_number_.is(r0) && scratch_.is(r3)) {
-    return true;
-  }
-  // Other register combinations are generated as and when they are needed,
-  // so it is unsafe to call them from stubs (we can't generate a stub while
-  // we are generating a stub).
-  return false;
-}
-
-
 void WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime(
     Isolate* isolate) {
   WriteInt32ToHeapNumberStub stub1(r1, r0, r2);
@@ -1674,18 +1659,11 @@ bool CEntryStub::NeedsImmovableCode() {
 }
 
 
-bool CEntryStub::IsPregenerated(Isolate* isolate) {
-  return (!save_doubles_ || isolate->fp_stubs_generated()) &&
-          result_size_ == 1;
-}
-
-
 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
   CEntryStub::GenerateAheadOfTime(isolate);
   WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime(isolate);
   StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
   StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
-  RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate);
   ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
   CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
   BinaryOpStub::GenerateAheadOfTime(isolate);
@@ -5347,68 +5325,6 @@ void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
 }
 
 
-struct AheadOfTimeWriteBarrierStubList {
-  Register object, value, address;
-  RememberedSetAction action;
-};
-
-
-#define REG(Name) { kRegister_ ## Name ## _Code }
-
-static const AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
-  // Used in RegExpExecStub.
-  { REG(r6), REG(r4), REG(r3), EMIT_REMEMBERED_SET },
-  // Used in CompileArrayPushCall.
-  // Also used in StoreIC::GenerateNormal via GenerateDictionaryStore.
-  // Also used in KeyedStoreIC::GenerateGeneric.
-  { REG(r3), REG(r4), REG(r5), EMIT_REMEMBERED_SET },
-  // Used in StoreStubCompiler::CompileStoreField via GenerateStoreField.
-  { REG(r1), REG(r2), REG(r3), EMIT_REMEMBERED_SET },
-  { REG(r3), REG(r2), REG(r1), EMIT_REMEMBERED_SET },
-  // Used in KeyedStoreStubCompiler::CompileStoreField via GenerateStoreField.
-  { REG(r2), REG(r1), REG(r3), EMIT_REMEMBERED_SET },
-  { REG(r3), REG(r1), REG(r2), EMIT_REMEMBERED_SET },
-  // KeyedStoreStubCompiler::GenerateStoreFastElement.
-  { REG(r3), REG(r2), REG(r4), EMIT_REMEMBERED_SET },
-  { REG(r2), REG(r3), REG(r4), EMIT_REMEMBERED_SET },
-  // ElementsTransitionGenerator::GenerateMapChangeElementTransition
-  // and ElementsTransitionGenerator::GenerateSmiToDouble
-  // and ElementsTransitionGenerator::GenerateDoubleToObject
-  { REG(r2), REG(r3), REG(r9), EMIT_REMEMBERED_SET },
-  { REG(r2), REG(r3), REG(r9), OMIT_REMEMBERED_SET },
-  // ElementsTransitionGenerator::GenerateDoubleToObject
-  { REG(r6), REG(r2), REG(r0), EMIT_REMEMBERED_SET },
-  { REG(r2), REG(r6), REG(r9), EMIT_REMEMBERED_SET },
-  // StoreArrayLiteralElementStub::Generate
-  { REG(r5), REG(r0), REG(r6), EMIT_REMEMBERED_SET },
-  // FastNewClosureStub::Generate
-  { REG(r2), REG(r4), REG(r1), EMIT_REMEMBERED_SET },
-  // StringAddStub::Generate
-  { REG(r3), REG(r1), REG(r4), EMIT_REMEMBERED_SET },
-  { REG(r3), REG(r0), REG(r4), EMIT_REMEMBERED_SET },
-  // Null termination.
-  { REG(no_reg), REG(no_reg), REG(no_reg), EMIT_REMEMBERED_SET}
-};
-
-#undef REG
-
-
-bool RecordWriteStub::IsPregenerated(Isolate* isolate) {
-  for (const AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
-       !entry->object.is(no_reg);
-       entry++) {
-    if (object_.is(entry->object) &&
-        value_.is(entry->value) &&
-        address_.is(entry->address) &&
-        remembered_set_action_ == entry->action &&
-        save_fp_regs_mode_ == kDontSaveFPRegs) {
-      return true;
-    }
-  }
-  return false;
-}
-
-
 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
     Isolate* isolate) {
   StoreBufferOverflowStub stub1(kDontSaveFPRegs);
@@ -5419,20 +5335,6 @@ void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
 }
 
 
-void RecordWriteStub::GenerateFixedRegStubsAheadOfTime(Isolate* isolate) {
-  for (const AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
-       !entry->object.is(no_reg);
-       entry++) {
-    RecordWriteStub stub(entry->object,
-                         entry->value,
-                         entry->address,
-                         entry->action,
-                         kDontSaveFPRegs);
-    stub.GetCode(isolate)->set_is_pregenerated(true);
-  }
-}
-
-
 bool CodeStub::CanUseFPRegisters() {
   return true;  // VFP2 is a base requirement for V8
 }
@@ -5738,7 +5640,6 @@ void StubFailureTailCallTrampolineStub::Generate(MacroAssembler* masm) {
 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
   if (masm->isolate()->function_entry_hook() != NULL) {
     PredictableCodeSizeScope predictable(masm, 4 * Assembler::kInstrSize);
-    AllowStubCallsScope allow_stub_calls(masm, true);
     ProfileEntryHookStub stub;
     __ push(lr);
     __ CallStub(&stub);
diff --git a/src/arm/code-stubs-arm.h b/src/arm/code-stubs-arm.h
index c03d8f2..e400686 100644
@@ -68,7 +68,6 @@ class StoreBufferOverflowStub: public PlatformCodeStub {
 
   void Generate(MacroAssembler* masm);
 
-  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE { return true; }
   static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate);
   virtual bool SometimesSetsUpAFrame() { return false; }
 
@@ -231,7 +230,6 @@ class WriteInt32ToHeapNumberStub : public PlatformCodeStub {
         the_heap_number_(the_heap_number),
         scratch_(scratch) { }
 
-  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE;
   static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate);
 
  private:
@@ -279,8 +277,6 @@ class RecordWriteStub: public PlatformCodeStub {
     INCREMENTAL_COMPACTION
   };
 
-  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE;
-  static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate);
   virtual bool SometimesSetsUpAFrame() { return false; }
 
   static void PatchBranchIntoNop(MacroAssembler* masm, int pos) {
diff --git a/src/arm/macro-assembler-arm.cc b/src/arm/macro-assembler-arm.cc
index eaaaefa..5f6076b 100644
@@ -44,7 +44,6 @@ namespace internal {
 MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
     : Assembler(arg_isolate, buffer, size),
       generating_stub_(false),
-      allow_stub_calls_(true),
       has_frame_(false) {
   if (isolate() != NULL) {
     code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
@@ -2289,8 +2288,6 @@ void MacroAssembler::CallStub(CodeStub* stub,
 
 
 void MacroAssembler::TailCallStub(CodeStub* stub, Condition cond) {
-  ASSERT(allow_stub_calls_ ||
-         stub->CompilingCallsToThisStubIsGCSafe(isolate()));
   Jump(stub->GetCode(isolate()), RelocInfo::CODE_TARGET, cond);
 }
 
@@ -2435,8 +2432,7 @@ void MacroAssembler::CallApiFunctionAndReturn(
 
 
 bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
-  if (!has_frame_ && stub->SometimesSetsUpAFrame()) return false;
-  return allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe(isolate());
+  return has_frame_ || !stub->SometimesSetsUpAFrame();
 }
 
 
diff --git a/src/arm/macro-assembler-arm.h b/src/arm/macro-assembler-arm.h
index 3c71bad..f71c1a3 100644
@@ -1198,8 +1198,6 @@ class MacroAssembler: public Assembler {
   // Verify restrictions about code generated in stubs.
   void set_generating_stub(bool value) { generating_stub_ = value; }
   bool generating_stub() { return generating_stub_; }
-  void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; }
-  bool allow_stub_calls() { return allow_stub_calls_; }
   void set_has_frame(bool value) { has_frame_ = value; }
   bool has_frame() { return has_frame_; }
   inline bool AllowThisStubCall(CodeStub* stub);
@@ -1478,7 +1476,6 @@ class MacroAssembler: public Assembler {
   MemOperand SafepointRegistersAndDoublesSlot(Register reg);
 
   bool generating_stub_;
-  bool allow_stub_calls_;
   bool has_frame_;
   // This handle will be patched with the code object on installation.
   Handle<Object> code_object_;
diff --git a/src/code-stubs-hydrogen.cc b/src/code-stubs-hydrogen.cc
index f717d75..03b8c18 100644
@@ -251,9 +251,6 @@ Handle<Code> HydrogenCodeStub::GenerateLightweightMissCode(Isolate* isolate) {
     // Update the static counter each time a new code stub is generated.
     isolate->counters()->code_stubs()->Increment();
 
-    // Nested stubs are not allowed for leaves.
-    AllowStubCallsScope allow_scope(&masm, false);
-
     // Generate the code for the stub.
     masm.set_generating_stub(true);
     NoCurrentFrameScope scope(&masm);
diff --git a/src/code-stubs.cc b/src/code-stubs.cc
index 87fca37..01f3f8c 100644
@@ -110,9 +110,6 @@ Handle<Code> PlatformCodeStub::GenerateCode(Isolate* isolate) {
     // Update the static counter each time a new code stub is generated.
     isolate->counters()->code_stubs()->Increment();
 
-    // Nested stubs are not allowed for leaves.
-    AllowStubCallsScope allow_scope(&masm, false);
-
     // Generate the code for the stub.
     masm.set_generating_stub(true);
     NoCurrentFrameScope scope(&masm);
@@ -148,7 +145,6 @@ Handle<Code> CodeStub::GetCode(Isolate* isolate) {
   if (UseSpecialCache()
       ? FindCodeInSpecialCache(&code, isolate)
       : FindCodeInCache(&code, isolate)) {
-    ASSERT(IsPregenerated(isolate) == code->is_pregenerated());
     ASSERT(GetCodeKind() == code->kind());
     return Handle<Code>(code);
   }
diff --git a/src/code-stubs.h b/src/code-stubs.h
index e20060f..a852a5c 100644
@@ -161,16 +161,6 @@ class CodeStub BASE_EMBEDDED {
 
   virtual ~CodeStub() {}
 
-  bool CompilingCallsToThisStubIsGCSafe(Isolate* isolate) {
-    bool is_pregenerated = IsPregenerated(isolate);
-    Code* code = NULL;
-    CHECK(!is_pregenerated || FindCodeInCache(&code, isolate));
-    return is_pregenerated;
-  }
-
-  // See comment above, where Instanceof is defined.
-  virtual bool IsPregenerated(Isolate* isolate) { return false; }
-
   static void GenerateStubsAheadOfTime(Isolate* isolate);
   static void GenerateStubsRequiringBuiltinsAheadOfTime(Isolate* isolate);
   static void GenerateFPStubs(Isolate* isolate);
@@ -683,8 +673,6 @@ class CreateAllocationSiteStub : public HydrogenCodeStub {
 
   virtual Handle<Code> GenerateCode(Isolate* isolate);
 
-  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE { return true; }
-
   static void GenerateAheadOfTime(Isolate* isolate);
 
   virtual void InitializeInterfaceDescriptor(
@@ -1467,7 +1455,6 @@ class CEntryStub : public PlatformCodeStub {
   // time, so it's OK to call it from other stubs that can't cope with GC during
   // their code generation.  On machines that always have gp registers (x64) we
   // can generate both variants ahead of time.
-  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE;
   static void GenerateAheadOfTime(Isolate* isolate);
 
  protected:
@@ -1819,24 +1806,6 @@ class StringCharAtGenerator {
 };
 
 
-class AllowStubCallsScope {
- public:
-  AllowStubCallsScope(MacroAssembler* masm, bool allow)
-       : masm_(masm), previous_allow_(masm->allow_stub_calls()) {
-    masm_->set_allow_stub_calls(allow);
-  }
-  ~AllowStubCallsScope() {
-    masm_->set_allow_stub_calls(previous_allow_);
-  }
-
- private:
-  MacroAssembler* masm_;
-  bool previous_allow_;
-
-  DISALLOW_COPY_AND_ASSIGN(AllowStubCallsScope);
-};
-
-
 class KeyedLoadDictionaryElementStub : public HydrogenCodeStub {
  public:
   KeyedLoadDictionaryElementStub() {}
@@ -2084,11 +2053,6 @@ class ArrayConstructorStubBase : public HydrogenCodeStub {
     return ContextCheckModeBits::decode(bit_field_);
   }
 
-  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE {
-    // We only pre-generate stubs that verify correct context
-    return context_mode() == CONTEXT_CHECK_REQUIRED;
-  }
-
   static void GenerateStubsAheadOfTime(Isolate* isolate);
   static void InstallDescriptors(Isolate* isolate);
 
@@ -2185,7 +2149,6 @@ class InternalArrayConstructorStubBase : public HydrogenCodeStub {
     kind_ = kind;
   }
 
-  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE { return true; }
   static void GenerateStubsAheadOfTime(Isolate* isolate);
   static void InstallDescriptors(Isolate* isolate);
 
@@ -2449,8 +2412,6 @@ class StubFailureTrampolineStub : public PlatformCodeStub {
   explicit StubFailureTrampolineStub(StubFunctionMode function_mode)
       : fp_registers_(CanUseFPRegisters()), function_mode_(function_mode) {}
 
-  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE { return true; }
-
   static void GenerateAheadOfTime(Isolate* isolate);
 
  private:
@@ -2476,8 +2437,6 @@ class StubFailureTailCallTrampolineStub : public PlatformCodeStub {
  public:
   StubFailureTailCallTrampolineStub() : fp_registers_(CanUseFPRegisters()) {}
 
-  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE { return true; }
-
   static void GenerateAheadOfTime(Isolate* isolate);
 
  private:
diff --git a/src/ia32/code-stubs-ia32.cc b/src/ia32/code-stubs-ia32.cc
index 715feca..8ce5fdd 100644
@@ -2952,18 +2952,11 @@ bool CEntryStub::NeedsImmovableCode() {
 }
 
 
-bool CEntryStub::IsPregenerated(Isolate* isolate) {
-  return (!save_doubles_ || isolate->fp_stubs_generated()) &&
-          result_size_ == 1;
-}
-
-
 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
   CEntryStub::GenerateAheadOfTime(isolate);
   StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
   StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
   // It is important that the store buffer overflow stubs are generated first.
-  RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate);
   ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
   CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
   if (Serializer::enabled()) {
@@ -5287,69 +5280,6 @@ void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
 }
 
 
-struct AheadOfTimeWriteBarrierStubList {
-  Register object, value, address;
-  RememberedSetAction action;
-};
-
-
-#define REG(Name) { kRegister_ ## Name ## _Code }
-
-static const AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
-  // Used in RegExpExecStub.
-  { REG(ebx), REG(eax), REG(edi), EMIT_REMEMBERED_SET },
-  // Used in CompileArrayPushCall.
-  { REG(ebx), REG(ecx), REG(edx), EMIT_REMEMBERED_SET },
-  { REG(ebx), REG(edi), REG(edx), OMIT_REMEMBERED_SET },
-  // Used in StoreStubCompiler::CompileStoreField and
-  // KeyedStoreStubCompiler::CompileStoreField via GenerateStoreField.
-  { REG(edx), REG(ecx), REG(ebx), EMIT_REMEMBERED_SET },
-  // GenerateStoreField calls the stub with two different permutations of
-  // registers.  This is the second.
-  { REG(ebx), REG(ecx), REG(edx), EMIT_REMEMBERED_SET },
-  // StoreIC::GenerateNormal via GenerateDictionaryStore
-  { REG(ebx), REG(edi), REG(edx), EMIT_REMEMBERED_SET },
-  // KeyedStoreIC::GenerateGeneric.
-  { REG(ebx), REG(edx), REG(ecx), EMIT_REMEMBERED_SET},
-  // KeyedStoreStubCompiler::GenerateStoreFastElement.
-  { REG(edi), REG(ebx), REG(ecx), EMIT_REMEMBERED_SET},
-  { REG(edx), REG(edi), REG(ebx), EMIT_REMEMBERED_SET},
-  // ElementsTransitionGenerator::GenerateMapChangeElementTransition
-  // and ElementsTransitionGenerator::GenerateSmiToDouble
-  // and ElementsTransitionGenerator::GenerateDoubleToObject
-  { REG(edx), REG(ebx), REG(edi), EMIT_REMEMBERED_SET},
-  { REG(edx), REG(ebx), REG(edi), OMIT_REMEMBERED_SET},
-  // ElementsTransitionGenerator::GenerateDoubleToObject
-  { REG(eax), REG(edx), REG(esi), EMIT_REMEMBERED_SET},
-  { REG(edx), REG(eax), REG(edi), EMIT_REMEMBERED_SET},
-  // StoreArrayLiteralElementStub::Generate
-  { REG(ebx), REG(eax), REG(ecx), EMIT_REMEMBERED_SET},
-  // FastNewClosureStub and StringAddStub::Generate
-  { REG(ecx), REG(edx), REG(ebx), EMIT_REMEMBERED_SET},
-  // StringAddStub::Generate
-  { REG(ecx), REG(eax), REG(ebx), EMIT_REMEMBERED_SET},
-  // Null termination.
-  { REG(no_reg), REG(no_reg), REG(no_reg), EMIT_REMEMBERED_SET}
-};
-
-#undef REG
-
-bool RecordWriteStub::IsPregenerated(Isolate* isolate) {
-  for (const AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
-       !entry->object.is(no_reg);
-       entry++) {
-    if (object_.is(entry->object) &&
-        value_.is(entry->value) &&
-        address_.is(entry->address) &&
-        remembered_set_action_ == entry->action &&
-        save_fp_regs_mode_ == kDontSaveFPRegs) {
-      return true;
-    }
-  }
-  return false;
-}
-
-
 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
     Isolate* isolate) {
   StoreBufferOverflowStub stub(kDontSaveFPRegs);
@@ -5361,20 +5291,6 @@ void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
 }
 
 
-void RecordWriteStub::GenerateFixedRegStubsAheadOfTime(Isolate* isolate) {
-  for (const AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
-       !entry->object.is(no_reg);
-       entry++) {
-    RecordWriteStub stub(entry->object,
-                         entry->value,
-                         entry->address,
-                         entry->action,
-                         kDontSaveFPRegs);
-    stub.GetCode(isolate)->set_is_pregenerated(true);
-  }
-}
-
-
 bool CodeStub::CanUseFPRegisters() {
   return CpuFeatures::IsSupported(SSE2);
 }
@@ -5703,10 +5619,6 @@ void StubFailureTailCallTrampolineStub::Generate(MacroAssembler* masm) {
 
 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
   if (masm->isolate()->function_entry_hook() != NULL) {
-    // It's always safe to call the entry hook stub, as the hook itself
-    // is not allowed to call back to V8.
-    AllowStubCallsScope allow_stub_calls(masm, true);
-
     ProfileEntryHookStub stub;
     masm->CallStub(&stub);
   }
diff --git a/src/ia32/code-stubs-ia32.h b/src/ia32/code-stubs-ia32.h
index 006651c..1425924 100644
@@ -74,7 +74,6 @@ class StoreBufferOverflowStub: public PlatformCodeStub {
 
   void Generate(MacroAssembler* masm);
 
-  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE { return true; }
   static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate);
   virtual bool SometimesSetsUpAFrame() { return false; }
 
@@ -303,8 +302,6 @@ class RecordWriteStub: public PlatformCodeStub {
     INCREMENTAL_COMPACTION
   };
 
-  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE;
-  static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate);
   virtual bool SometimesSetsUpAFrame() { return false; }
 
   static const byte kTwoByteNopInstruction = 0x3c;  // Cmpb al, #imm8.
diff --git a/src/ia32/macro-assembler-ia32.cc b/src/ia32/macro-assembler-ia32.cc
index f1c6a48..52d42f6 100644
@@ -46,7 +46,6 @@ namespace internal {
 MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
     : Assembler(arg_isolate, buffer, size),
       generating_stub_(false),
-      allow_stub_calls_(true),
       has_frame_(false) {
   if (isolate() != NULL) {
     // TODO(titzer): should we just use a null handle here instead?
@@ -2188,8 +2187,6 @@ void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) {
 
 
 void MacroAssembler::TailCallStub(CodeStub* stub) {
-  ASSERT(allow_stub_calls_ ||
-         stub->CompilingCallsToThisStubIsGCSafe(isolate()));
   jmp(stub->GetCode(isolate()), RelocInfo::CODE_TARGET);
 }
 
@@ -2201,8 +2198,7 @@ void MacroAssembler::StubReturn(int argc) {
 
 
 bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
-  if (!has_frame_ && stub->SometimesSetsUpAFrame()) return false;
-  return allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe(isolate());
+  return has_frame_ || !stub->SometimesSetsUpAFrame();
 }
 
 
diff --git a/src/ia32/macro-assembler-ia32.h b/src/ia32/macro-assembler-ia32.h
index 881a236..054b164 100644
@@ -912,8 +912,6 @@ class MacroAssembler: public Assembler {
   // Verify restrictions about code generated in stubs.
   void set_generating_stub(bool value) { generating_stub_ = value; }
   bool generating_stub() { return generating_stub_; }
-  void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; }
-  bool allow_stub_calls() { return allow_stub_calls_; }
   void set_has_frame(bool value) { has_frame_ = value; }
   bool has_frame() { return has_frame_; }
   inline bool AllowThisStubCall(CodeStub* stub);
@@ -999,7 +997,6 @@ class MacroAssembler: public Assembler {
 
  private:
   bool generating_stub_;
-  bool allow_stub_calls_;
   bool has_frame_;
   // This handle will be patched with the code object on installation.
   Handle<Object> code_object_;
diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc
index 4fb3a70..fbc2f11 100644
@@ -2781,21 +2781,11 @@ bool CEntryStub::NeedsImmovableCode() {
 }
 
 
-bool CEntryStub::IsPregenerated(Isolate* isolate) {
-#ifdef _WIN64
-  return result_size_ == 1;
-#else
-  return true;
-#endif
-}
-
-
 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
   CEntryStub::GenerateAheadOfTime(isolate);
   StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
   StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
   // It is important that the store buffer overflow stubs are generated first.
-  RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate);
   ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
   CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
   BinaryOpStub::GenerateAheadOfTime(isolate);
@@ -5085,71 +5075,6 @@ void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
 }
 
 
-struct AheadOfTimeWriteBarrierStubList {
-  Register object, value, address;
-  RememberedSetAction action;
-};
-
-
-#define REG(Name) { kRegister_ ## Name ## _Code }
-
-struct AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
-  // Used in RegExpExecStub.
-  { REG(rbx), REG(rax), REG(rdi), EMIT_REMEMBERED_SET },
-  // Used in CompileArrayPushCall.
-  { REG(rbx), REG(rcx), REG(rdx), EMIT_REMEMBERED_SET },
-  // Used in StoreStubCompiler::CompileStoreField and
-  // KeyedStoreStubCompiler::CompileStoreField via GenerateStoreField.
-  { REG(rdx), REG(rcx), REG(rbx), EMIT_REMEMBERED_SET },
-  // GenerateStoreField calls the stub with two different permutations of
-  // registers.  This is the second.
-  { REG(rbx), REG(rcx), REG(rdx), EMIT_REMEMBERED_SET },
-  // StoreIC::GenerateNormal via GenerateDictionaryStore.
-  { REG(rbx), REG(r8), REG(r9), EMIT_REMEMBERED_SET },
-  // KeyedStoreIC::GenerateGeneric.
-  { REG(rbx), REG(rdx), REG(rcx), EMIT_REMEMBERED_SET},
-  // KeyedStoreStubCompiler::GenerateStoreFastElement.
-  { REG(rdi), REG(rbx), REG(rcx), EMIT_REMEMBERED_SET},
-  { REG(rdx), REG(rdi), REG(rbx), EMIT_REMEMBERED_SET},
-  // ElementsTransitionGenerator::GenerateMapChangeElementTransition
-  // and ElementsTransitionGenerator::GenerateSmiToDouble
-  // and ElementsTransitionGenerator::GenerateDoubleToObject
-  { REG(rdx), REG(rbx), REG(rdi), EMIT_REMEMBERED_SET},
-  { REG(rdx), REG(rbx), REG(rdi), OMIT_REMEMBERED_SET},
-  // ElementsTransitionGenerator::GenerateSmiToDouble
-  // and ElementsTransitionGenerator::GenerateDoubleToObject
-  { REG(rdx), REG(r11), REG(r15), EMIT_REMEMBERED_SET},
-  // ElementsTransitionGenerator::GenerateDoubleToObject
-  { REG(r11), REG(rax), REG(r15), EMIT_REMEMBERED_SET},
-  // StoreArrayLiteralElementStub::Generate
-  { REG(rbx), REG(rax), REG(rcx), EMIT_REMEMBERED_SET},
-  // FastNewClosureStub::Generate and
-  // StringAddStub::Generate
-  { REG(rcx), REG(rdx), REG(rbx), EMIT_REMEMBERED_SET},
-  // StringAddStub::Generate
-  { REG(rcx), REG(rax), REG(rbx), EMIT_REMEMBERED_SET},
-  // Null termination.
-  { REG(no_reg), REG(no_reg), REG(no_reg), EMIT_REMEMBERED_SET}
-};
-
-#undef REG
-
-bool RecordWriteStub::IsPregenerated(Isolate* isolate) {
-  for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
-       !entry->object.is(no_reg);
-       entry++) {
-    if (object_.is(entry->object) &&
-        value_.is(entry->value) &&
-        address_.is(entry->address) &&
-        remembered_set_action_ == entry->action &&
-        save_fp_regs_mode_ == kDontSaveFPRegs) {
-      return true;
-    }
-  }
-  return false;
-}
-
-
 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
     Isolate* isolate) {
   StoreBufferOverflowStub stub1(kDontSaveFPRegs);
@@ -5159,20 +5084,6 @@ void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
 }
 
 
-void RecordWriteStub::GenerateFixedRegStubsAheadOfTime(Isolate* isolate) {
-  for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
-       !entry->object.is(no_reg);
-       entry++) {
-    RecordWriteStub stub(entry->object,
-                         entry->value,
-                         entry->address,
-                         entry->action,
-                         kDontSaveFPRegs);
-    stub.GetCode(isolate)->set_is_pregenerated(true);
-  }
-}
-
-
 bool CodeStub::CanUseFPRegisters() {
   return true;  // Always have SSE2 on x64.
 }
@@ -5496,10 +5407,6 @@ void StubFailureTailCallTrampolineStub::Generate(MacroAssembler* masm) {
 
 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
   if (masm->isolate()->function_entry_hook() != NULL) {
-    // It's always safe to call the entry hook stub, as the hook itself
-    // is not allowed to call back to V8.
-    AllowStubCallsScope allow_stub_calls(masm, true);
-
     ProfileEntryHookStub stub;
     masm->CallStub(&stub);
   }
diff --git a/src/x64/code-stubs-x64.h b/src/x64/code-stubs-x64.h
index c76abcf..7a3f6a6 100644
@@ -69,7 +69,6 @@ class StoreBufferOverflowStub: public PlatformCodeStub {
 
   void Generate(MacroAssembler* masm);
 
-  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE { return true; }
   static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate);
   virtual bool SometimesSetsUpAFrame() { return false; }
 
@@ -293,8 +292,6 @@ class RecordWriteStub: public PlatformCodeStub {
     INCREMENTAL_COMPACTION
   };
 
-  virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE;
-  static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate);
   virtual bool SometimesSetsUpAFrame() { return false; }
 
   static const byte kTwoByteNopInstruction = 0x3c;  // Cmpb al, #imm8.
diff --git a/src/x64/macro-assembler-x64.cc b/src/x64/macro-assembler-x64.cc
index b62ffc6..6c3f501 100644
@@ -45,7 +45,6 @@ namespace internal {
 MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
     : Assembler(arg_isolate, buffer, size),
       generating_stub_(false),
-      allow_stub_calls_(true),
       has_frame_(false),
       root_array_available_(true) {
   if (isolate() != NULL) {
@@ -555,8 +554,6 @@ void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) {
 
 
 void MacroAssembler::TailCallStub(CodeStub* stub) {
-  ASSERT(allow_stub_calls_ ||
-         stub->CompilingCallsToThisStubIsGCSafe(isolate()));
   Jump(stub->GetCode(isolate()), RelocInfo::CODE_TARGET);
 }
 
@@ -568,8 +565,7 @@ void MacroAssembler::StubReturn(int argc) {
 
 
 bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
-  if (!has_frame_ && stub->SometimesSetsUpAFrame()) return false;
-  return allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe(isolate());
+  return has_frame_ || !stub->SometimesSetsUpAFrame();
 }
 
 
@@ -1049,14 +1045,7 @@ void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) {
   if (emit_debug_code()) {
     movq(dst, Smi::FromInt(kSmiConstantRegisterValue), RelocInfo::NONE64);
     cmpq(dst, kSmiConstantRegister);
-    if (allow_stub_calls()) {
-      Assert(equal, kUninitializedKSmiConstantRegister);
-    } else {
-      Label ok;
-      j(equal, &ok, Label::kNear);
-      int3();
-      bind(&ok);
-    }
+    Assert(equal, kUninitializedKSmiConstantRegister);
   }
   int value = source->value();
   if (value == 0) {
@@ -1117,11 +1106,7 @@ void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) {
     testb(dst, Immediate(0x01));
     Label ok;
     j(zero, &ok, Label::kNear);
-    if (allow_stub_calls()) {
-      Abort(kInteger32ToSmiFieldWritingToNonSmiLocation);
-    } else {
-      int3();
-    }
+    Abort(kInteger32ToSmiFieldWritingToNonSmiLocation);
     bind(&ok);
   }
   ASSERT(kSmiShift % kBitsPerByte == 0);
@@ -2182,10 +2167,8 @@ void MacroAssembler::SelectNonSmi(Register dst,
   ASSERT(!dst.is(src2));
   // Both operands must not be smis.
 #ifdef DEBUG
-  if (allow_stub_calls()) {  // Check contains a stub call.
-    Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2));
-    Check(not_both_smis, kBothRegistersWereSmisInSelectNonSmi);
-  }
+  Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2));
+  Check(not_both_smis, kBothRegistersWereSmisInSelectNonSmi);
 #endif
   STATIC_ASSERT(kSmiTag == 0);
   ASSERT_EQ(0, Smi::FromInt(0));
diff --git a/src/x64/macro-assembler-x64.h b/src/x64/macro-assembler-x64.h
index 868c648..98808a8 100644
@@ -1400,8 +1400,6 @@ class MacroAssembler: public Assembler {
   // Verify restrictions about code generated in stubs.
   void set_generating_stub(bool value) { generating_stub_ = value; }
   bool generating_stub() { return generating_stub_; }
-  void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; }
-  bool allow_stub_calls() { return allow_stub_calls_; }
   void set_has_frame(bool value) { has_frame_ = value; }
   bool has_frame() { return has_frame_; }
   inline bool AllowThisStubCall(CodeStub* stub);
@@ -1451,7 +1449,6 @@ class MacroAssembler: public Assembler {
   static const int kSmiShift = kSmiTagSize + kSmiShiftSize;
 
   bool generating_stub_;
-  bool allow_stub_calls_;
   bool has_frame_;
   bool root_array_available_;
 
diff --git a/test/cctest/test-code-stubs-arm.cc b/test/cctest/test-code-stubs-arm.cc
index 54eaa58..53cdd16 100644
@@ -53,7 +53,6 @@ ConvertDToIFunc MakeConvertDToIFuncTrampoline(Isolate* isolate,
   CHECK(buffer);
   HandleScope handles(isolate);
   MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size));
-  masm.set_allow_stub_calls(false);
   DoubleToIStub stub(source_reg, destination_reg, 0, true, inline_fastpath);
 
   byte* start = stub.GetCode(isolate)->instruction_start();
diff --git a/test/cctest/test-code-stubs-ia32.cc b/test/cctest/test-code-stubs-ia32.cc
index 34db99f..c206a01 100644
@@ -53,7 +53,6 @@ ConvertDToIFunc MakeConvertDToIFuncTrampoline(Isolate* isolate,
   CHECK(buffer);
   HandleScope handles(isolate);
   MacroAssembler assm(isolate, buffer, static_cast<int>(actual_size));
-  assm.set_allow_stub_calls(false);
   int offset =
     source_reg.is(esp) ? 0 : (HeapNumber::kValueOffset - kSmiTagSize);
   DoubleToIStub stub(source_reg, destination_reg, offset, true);
diff --git a/test/cctest/test-code-stubs-x64.cc b/test/cctest/test-code-stubs-x64.cc
index 4f89a63..6b3a12c 100644
@@ -52,7 +52,6 @@ ConvertDToIFunc MakeConvertDToIFuncTrampoline(Isolate* isolate,
   CHECK(buffer);
   HandleScope handles(isolate);
   MacroAssembler assm(isolate, buffer, static_cast<int>(actual_size));
-  assm.set_allow_stub_calls(false);
   int offset =
     source_reg.is(rsp) ? 0 : (HeapNumber::kValueOffset - kSmiTagSize);
   DoubleToIStub stub(source_reg, destination_reg, offset, true);
diff --git a/test/cctest/test-macro-assembler-arm.cc b/test/cctest/test-macro-assembler-arm.cc
index 377450a..d40b8a5 100644
@@ -148,7 +148,6 @@ TEST(LoadAndStoreWithRepresentation) {
   HandleScope handles(isolate);
   MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size));
   MacroAssembler* masm = &assembler;  // Create a pointer for the __ macro.
-  masm->set_allow_stub_calls(false);
   __ sub(sp, sp, Operand(1 * kPointerSize));
   Label exit;
 
diff --git a/test/cctest/test-macro-assembler-ia32.cc b/test/cctest/test-macro-assembler-ia32.cc
index 4c817dc..38c738f 100644
@@ -62,7 +62,6 @@ TEST(LoadAndStoreWithRepresentation) {
   HandleScope handles(isolate);
   MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size));
   MacroAssembler* masm = &assembler;  // Create a pointer for the __ macro.
-  masm->set_allow_stub_calls(false);
   __ push(ebx);
   __ push(edx);
   __ sub(esp, Immediate(1 * kPointerSize));
diff --git a/test/cctest/test-macro-assembler-x64.cc b/test/cctest/test-macro-assembler-x64.cc
index a85d8e0..3f25162 100644
@@ -165,7 +165,6 @@ TEST(SmiMove) {
   HandleScope handles(isolate);
   MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size));
   MacroAssembler* masm = &assembler;  // Create a pointer for the __ macro.
-  masm->set_allow_stub_calls(false);
   EntryCode(masm);
   Label exit;
 
@@ -255,7 +254,6 @@ TEST(SmiCompare) {
   MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size));
 
   MacroAssembler* masm = &assembler;
-  masm->set_allow_stub_calls(false);
   EntryCode(masm);
   Label exit;
 
@@ -306,7 +304,6 @@ TEST(Integer32ToSmi) {
   MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size));
 
   MacroAssembler* masm = &assembler;
-  masm->set_allow_stub_calls(false);
   EntryCode(masm);
   Label exit;
 
@@ -435,7 +432,6 @@ TEST(Integer64PlusConstantToSmi) {
   MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size));
 
   MacroAssembler* masm = &assembler;
-  masm->set_allow_stub_calls(false);
   EntryCode(masm);
   Label exit;
 
@@ -480,7 +476,6 @@ TEST(SmiCheck) {
   MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size));
 
   MacroAssembler* masm = &assembler;
-  masm->set_allow_stub_calls(false);
   EntryCode(masm);
   Label exit;
   Condition cond;
@@ -729,7 +724,6 @@ TEST(SmiNeg) {
   MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size));
 
   MacroAssembler* masm = &assembler;
-  masm->set_allow_stub_calls(false);
   EntryCode(masm);
   Label exit;
 
@@ -937,7 +931,7 @@ TEST(SmiAdd) {
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer =
-      static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize * 2,
+      static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize * 3,
                                       &actual_size,
                                       true));
   CHECK(buffer);
@@ -946,7 +940,6 @@ TEST(SmiAdd) {
   MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size));
 
   MacroAssembler* masm = &assembler;
-  masm->set_allow_stub_calls(false);
   EntryCode(masm);
   Label exit;
 
@@ -1159,7 +1152,7 @@ TEST(SmiSub) {
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer =
-      static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize * 2,
+      static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize * 4,
                                       &actual_size,
                                       true));
   CHECK(buffer);
@@ -1168,7 +1161,6 @@ TEST(SmiSub) {
   MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size));
 
   MacroAssembler* masm = &assembler;
-  masm->set_allow_stub_calls(false);
   EntryCode(masm);
   Label exit;
 
@@ -1259,7 +1251,6 @@ TEST(SmiMul) {
   MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size));
 
   MacroAssembler* masm = &assembler;
-  masm->set_allow_stub_calls(false);
   EntryCode(masm);
   Label exit;
 
@@ -1366,7 +1357,6 @@ TEST(SmiDiv) {
   MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size));
 
   MacroAssembler* masm = &assembler;
-  masm->set_allow_stub_calls(false);
   EntryCode(masm);
   Label exit;
 
@@ -1477,7 +1467,6 @@ TEST(SmiMod) {
   MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size));
 
   MacroAssembler* masm = &assembler;
-  masm->set_allow_stub_calls(false);
   EntryCode(masm);
   Label exit;
 
@@ -1566,7 +1555,7 @@ TEST(SmiIndex) {
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer =
-      static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize * 3,
+      static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize * 4,
                                       &actual_size,
                                       true));
   CHECK(buffer);
@@ -1575,7 +1564,6 @@ TEST(SmiIndex) {
   MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size));
 
   MacroAssembler* masm = &assembler;
-  masm->set_allow_stub_calls(false);
   EntryCode(masm);
   Label exit;
 
@@ -1636,7 +1624,7 @@ TEST(SmiSelectNonSmi) {
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer =
-      static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize,
+      static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize * 2,
                                       &actual_size,
                                       true));
   CHECK(buffer);
@@ -1645,7 +1633,6 @@ TEST(SmiSelectNonSmi) {
   MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size));
 
   MacroAssembler* masm = &assembler;
-  masm->set_allow_stub_calls(false);  // Avoid inline checks.
   EntryCode(masm);
   Label exit;
 
@@ -1716,7 +1703,7 @@ TEST(SmiAnd) {
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer =
-      static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize,
+      static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize * 2,
                                       &actual_size,
                                       true));
   CHECK(buffer);
@@ -1725,7 +1712,6 @@ TEST(SmiAnd) {
   MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size));
 
   MacroAssembler* masm = &assembler;
-  masm->set_allow_stub_calls(false);
   EntryCode(masm);
   Label exit;
 
@@ -1798,7 +1784,7 @@ TEST(SmiOr) {
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer =
-      static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize,
+      static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize * 2,
                                       &actual_size,
                                       true));
   CHECK(buffer);
@@ -1807,7 +1793,6 @@ TEST(SmiOr) {
   MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size));
 
   MacroAssembler* masm = &assembler;
-  masm->set_allow_stub_calls(false);
   EntryCode(masm);
   Label exit;
 
@@ -1882,7 +1867,7 @@ TEST(SmiXor) {
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer =
-      static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize,
+      static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize * 2,
                                       &actual_size,
                                       true));
   CHECK(buffer);
@@ -1891,7 +1876,6 @@ TEST(SmiXor) {
   MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size));
 
   MacroAssembler* masm = &assembler;
-  masm->set_allow_stub_calls(false);
   EntryCode(masm);
   Label exit;
 
@@ -1959,7 +1943,6 @@ TEST(SmiNot) {
   MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size));
 
   MacroAssembler* masm = &assembler;
-  masm->set_allow_stub_calls(false);
   EntryCode(masm);
   Label exit;
 
@@ -2047,7 +2030,7 @@ TEST(SmiShiftLeft) {
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer =
-      static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize * 4,
+      static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize * 7,
                                       &actual_size,
                                       true));
   CHECK(buffer);
@@ -2056,7 +2039,6 @@ TEST(SmiShiftLeft) {
   MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size));
 
   MacroAssembler* masm = &assembler;
-  masm->set_allow_stub_calls(false);
   EntryCode(masm);
   Label exit;
 
@@ -2154,7 +2136,7 @@ TEST(SmiShiftLogicalRight) {
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer =
-      static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize * 3,
+      static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize * 5,
                                       &actual_size,
                                       true));
   CHECK(buffer);
@@ -2163,7 +2145,6 @@ TEST(SmiShiftLogicalRight) {
   MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size));
 
   MacroAssembler* masm = &assembler;
-  masm->set_allow_stub_calls(false);
   EntryCode(masm);
   Label exit;
 
@@ -2224,7 +2205,7 @@ TEST(SmiShiftArithmeticRight) {
   // Allocate an executable page of memory.
   size_t actual_size;
   byte* buffer =
-      static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize * 2,
+      static_cast<byte*>(OS::Allocate(Assembler::kMinimalBufferSize * 3,
                                       &actual_size,
                                       true));
   CHECK(buffer);
@@ -2233,7 +2214,6 @@ TEST(SmiShiftArithmeticRight) {
   MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size));
 
   MacroAssembler* masm = &assembler;
-  masm->set_allow_stub_calls(false);
   EntryCode(masm);
   Label exit;
 
@@ -2298,7 +2278,6 @@ TEST(PositiveSmiTimesPowerOfTwoToInteger64) {
   MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size));
 
   MacroAssembler* masm = &assembler;
-  masm->set_allow_stub_calls(false);
   EntryCode(masm);
   Label exit;
 
@@ -2342,7 +2321,6 @@ TEST(OperandOffset) {
   MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size));
 
   MacroAssembler* masm = &assembler;
-  masm->set_allow_stub_calls(false);
   Label exit;
 
   EntryCode(masm);
@@ -2695,7 +2673,6 @@ TEST(LoadAndStoreWithRepresentation) {
   HandleScope handles(isolate);
   MacroAssembler assembler(isolate, buffer, static_cast<int>(actual_size));
   MacroAssembler* masm = &assembler;  // Create a pointer for the __ macro.
-  masm->set_allow_stub_calls(false);
   EntryCode(masm);
   __ subq(rsp, Immediate(1 * kPointerSize));
   Label exit;