CodeStubs contain their corresponding Isolate* now. (part 1)
author    svenpanne@chromium.org <svenpanne@chromium.org@ce2b1a6d-e550-0410-aec6-3dcde31c8c00>
Thu, 24 Apr 2014 06:25:42 +0000 (06:25 +0000)
committer svenpanne@chromium.org <svenpanne@chromium.org@ce2b1a6d-e550-0410-aec6-3dcde31c8c00>
Thu, 24 Apr 2014 06:25:42 +0000 (06:25 +0000)
This is a purely mechanical change: it adds an Isolate* to the CodeStub
constructor and a corresponding field plus a getter. A few methods in
CodeStub and its subclasses can now be simplified, but that will be done
in a separate CL.

The underlying reason, apart from simplicity, is that deep down in the
call chain we need to detect whether the serializer is active. This
information will be part of the Isolate, not a global variable with
funky synchronization primitives around it (which is fundamentally
wrong and the underlying cause of race conditions and a catch-22
during initialization).
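
For reference, a minimal sketch of the shape of the change (simplified and
hypothetical, not the actual declarations in src/code-stubs.h; the names
follow the pattern visible in the diff below):

  // Simplified sketch of the CodeStub hierarchy, assuming only the parts
  // touched by this CL.
  class Isolate;

  class CodeStub {
   public:
    explicit CodeStub(Isolate* isolate) : isolate_(isolate) {}
    Isolate* isolate() const { return isolate_; }  // getter used in Generate() bodies
   private:
    Isolate* isolate_;  // the new per-stub field
  };

  class PlatformCodeStub : public CodeStub {
   public:
    explicit PlatformCodeStub(Isolate* isolate) : CodeStub(isolate) {}
  };

  // Call sites now pass the isolate explicitly, e.g.
  //   CEntryStub stub(isolate, 1, kDontSaveFPRegs);
  // and stub code uses isolate() instead of masm->isolate().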

BUG=359977
LOG=y
R=mstarzinger@chromium.org

Review URL: https://codereview.chromium.org/246643014

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@20919 ce2b1a6d-e550-0410-aec6-3dcde31c8c00

48 files changed:
src/arm/builtins-arm.cc
src/arm/code-stubs-arm.cc
src/arm/code-stubs-arm.h
src/arm/debug-arm.cc
src/arm/full-codegen-arm.cc
src/arm/lithium-codegen-arm.cc
src/arm/macro-assembler-arm.cc
src/arm/regexp-macro-assembler-arm.cc
src/arm/stub-cache-arm.cc
src/arm64/builtins-arm64.cc
src/arm64/code-stubs-arm64.cc
src/arm64/code-stubs-arm64.h
src/arm64/debug-arm64.cc
src/arm64/full-codegen-arm64.cc
src/arm64/lithium-codegen-arm64.cc
src/arm64/lithium-codegen-arm64.h
src/arm64/macro-assembler-arm64.cc
src/arm64/regexp-macro-assembler-arm64.cc
src/arm64/stub-cache-arm64.cc
src/code-stubs.cc
src/code-stubs.h
src/deoptimizer.cc
src/frames.cc
src/heap.cc
src/hydrogen.cc
src/ia32/builtins-ia32.cc
src/ia32/code-stubs-ia32.cc
src/ia32/code-stubs-ia32.h
src/ia32/debug-ia32.cc
src/ia32/full-codegen-ia32.cc
src/ia32/lithium-codegen-ia32.cc
src/ia32/macro-assembler-ia32.cc
src/ia32/stub-cache-ia32.cc
src/ic.cc
src/stub-cache.cc
src/type-info.cc
src/x64/builtins-x64.cc
src/x64/code-stubs-x64.cc
src/x64/code-stubs-x64.h
src/x64/debug-x64.cc
src/x64/full-codegen-x64.cc
src/x64/lithium-codegen-x64.cc
src/x64/macro-assembler-x64.cc
src/x64/stub-cache-x64.cc
test/cctest/test-code-stubs-arm.cc
test/cctest/test-code-stubs-arm64.cc
test/cctest/test-code-stubs-ia32.cc
test/cctest/test-code-stubs-x64.cc

diff --git a/src/arm/builtins-arm.cc b/src/arm/builtins-arm.cc
index 81d7fcb5f24acc5ff0136980a5e1c519ea08ec71..ba46fa02ede8d059a19b9c68dafff916681a353c 100644 (file)
@@ -807,7 +807,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
     if (is_construct) {
       // No type feedback cell is available
       __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
-      CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
+      CallConstructStub stub(masm->isolate(), NO_CALL_FUNCTION_FLAGS);
       __ CallStub(&stub);
     } else {
       ParameterCount actual(r0);
diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc
index be876b6eee89c27378fcd2f79826bd0ad364a75a..d1587c8886e838a5935ccbdd7def4df8b7ac3a58 100644 (file)
@@ -504,10 +504,9 @@ static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
 
 void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) {
   // Update the static counter each time a new code stub is generated.
-  Isolate* isolate = masm->isolate();
-  isolate->counters()->code_stubs()->Increment();
+  isolate()->counters()->code_stubs()->Increment();
 
-  CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate);
+  CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate());
   int param_count = descriptor->register_param_count_;
   {
     // Call the runtime system in a fresh internal frame.
@@ -533,11 +532,13 @@ void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) {
 // stub so you don't have to set up the frame.
 class ConvertToDoubleStub : public PlatformCodeStub {
  public:
-  ConvertToDoubleStub(Register result_reg_1,
+  ConvertToDoubleStub(Isolate* isolate,
+                      Register result_reg_1,
                       Register result_reg_2,
                       Register source_reg,
                       Register scratch_reg)
-      : result1_(result_reg_1),
+      : PlatformCodeStub(isolate),
+        result1_(result_reg_1),
         result2_(result_reg_2),
         source_(source_reg),
         zeros_(scratch_reg) { }
@@ -726,8 +727,8 @@ void DoubleToIStub::Generate(MacroAssembler* masm) {
 
 void WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime(
     Isolate* isolate) {
-  WriteInt32ToHeapNumberStub stub1(r1, r0, r2);
-  WriteInt32ToHeapNumberStub stub2(r2, r0, r3);
+  WriteInt32ToHeapNumberStub stub1(isolate, r1, r0, r2);
+  WriteInt32ToHeapNumberStub stub2(isolate, r2, r0, r3);
   stub1.GetCode(isolate);
   stub2.GetCode(isolate);
 }
@@ -1124,7 +1125,6 @@ void ICCompareStub::GenerateGeneric(MacroAssembler* masm) {
   __ bind(&both_loaded_as_doubles);
   // The arguments have been converted to doubles and stored in d6 and d7, if
   // VFP3 is supported, or in r0, r1, r2, and r3.
-  Isolate* isolate = masm->isolate();
   __ bind(&lhs_not_nan);
   Label no_nan;
   // ARMv7 VFP3 instructions to implement double precision comparison.
@@ -1187,7 +1187,8 @@ void ICCompareStub::GenerateGeneric(MacroAssembler* masm) {
 
   __ JumpIfNonSmisNotBothSequentialAsciiStrings(lhs, rhs, r2, r3, &slow);
 
-  __ IncrementCounter(isolate->counters()->string_compare_native(), 1, r2, r3);
+  __ IncrementCounter(isolate()->counters()->string_compare_native(), 1, r2,
+                      r3);
   if (cc == eq) {
     StringCompareStub::GenerateFlatAsciiStringEquals(masm,
                                                      lhs,
@@ -1251,9 +1252,9 @@ void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
 
   AllowExternalCallThatCantCauseGC scope(masm);
   __ PrepareCallCFunction(argument_count, fp_argument_count, scratch);
-  __ mov(r0, Operand(ExternalReference::isolate_address(masm->isolate())));
+  __ mov(r0, Operand(ExternalReference::isolate_address(isolate())));
   __ CallCFunction(
-      ExternalReference::store_buffer_overflow_function(masm->isolate()),
+      ExternalReference::store_buffer_overflow_function(isolate()),
       argument_count);
   if (save_doubles_ == kSaveFPRegs) {
     __ RestoreFPRegs(sp, scratch);
@@ -1373,7 +1374,7 @@ void MathPowStub::Generate(MacroAssembler* masm) {
       __ PrepareCallCFunction(0, 2, scratch);
       __ MovToFloatParameters(double_base, double_exponent);
       __ CallCFunction(
-          ExternalReference::power_double_double_function(masm->isolate()),
+          ExternalReference::power_double_double_function(isolate()),
           0, 2);
     }
     __ pop(lr);
@@ -1424,7 +1425,7 @@ void MathPowStub::Generate(MacroAssembler* masm) {
   __ vcvt_f64_s32(double_exponent, single_scratch);
 
   // Returning or bailing out.
-  Counters* counters = masm->isolate()->counters();
+  Counters* counters = isolate()->counters();
   if (exponent_type_ == ON_STACK) {
     // The arguments are still on the stack.
     __ bind(&call_runtime);
@@ -1447,7 +1448,7 @@ void MathPowStub::Generate(MacroAssembler* masm) {
       __ PrepareCallCFunction(0, 2, scratch);
       __ MovToFloatParameters(double_base, double_exponent);
       __ CallCFunction(
-          ExternalReference::power_double_double_function(masm->isolate()),
+          ExternalReference::power_double_double_function(isolate()),
           0, 2);
     }
     __ pop(lr);
@@ -1479,8 +1480,8 @@ void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
 
 void CodeStub::GenerateFPStubs(Isolate* isolate) {
   SaveFPRegsMode mode = kSaveFPRegs;
-  CEntryStub save_doubles(1, mode);
-  StoreBufferOverflowStub stub(mode);
+  CEntryStub save_doubles(isolate, 1, mode);
+  StoreBufferOverflowStub stub(isolate, mode);
   // These stubs might already be in the snapshot, detect that and don't
   // regenerate, which would lead to code stub initialization state being messed
   // up.
@@ -1497,7 +1498,7 @@ void CodeStub::GenerateFPStubs(Isolate* isolate) {
 
 
 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
-  CEntryStub stub(1, kDontSaveFPRegs);
+  CEntryStub stub(isolate, 1, kDontSaveFPRegs);
   stub.GetCode(isolate);
 }
 
@@ -1531,8 +1532,6 @@ void CEntryStub::Generate(MacroAssembler* masm) {
 
   // Result returned in r0 or r0+r1 by default.
 
-  Isolate* isolate = masm->isolate();
-
 #if V8_HOST_ARCH_ARM
   int frame_alignment = MacroAssembler::ActivationFrameAlignment();
   int frame_alignment_mask = frame_alignment - 1;
@@ -1551,7 +1550,7 @@ void CEntryStub::Generate(MacroAssembler* masm) {
 
   // Call C built-in.
   // r0 = argc, r1 = argv
-  __ mov(r2, Operand(ExternalReference::isolate_address(isolate)));
+  __ mov(r2, Operand(ExternalReference::isolate_address(isolate())));
 
   // To let the GC traverse the return address of the exit frames, we need to
   // know where the return address is. The CEntryStub is unmovable, so
@@ -1586,7 +1585,7 @@ void CEntryStub::Generate(MacroAssembler* masm) {
   __ b(eq, &exception_returned);
 
   ExternalReference pending_exception_address(
-      Isolate::kPendingExceptionAddress, isolate);
+      Isolate::kPendingExceptionAddress, isolate());
 
   // Check that there is no pending exception, otherwise we
   // should have returned the exception sentinel.
@@ -1673,15 +1672,14 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
   // r2: receiver
   // r3: argc
   // r4: argv
-  Isolate* isolate = masm->isolate();
   int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
   if (FLAG_enable_ool_constant_pool) {
-    __ mov(r8, Operand(isolate->factory()->empty_constant_pool_array()));
+    __ mov(r8, Operand(isolate()->factory()->empty_constant_pool_array()));
   }
   __ mov(r7, Operand(Smi::FromInt(marker)));
   __ mov(r6, Operand(Smi::FromInt(marker)));
   __ mov(r5,
-         Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate)));
+         Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
   __ ldr(r5, MemOperand(r5));
   __ mov(ip, Operand(-1));  // Push a bad frame pointer to fail if it is used.
   __ stm(db_w, sp, r5.bit() | r6.bit() | r7.bit() |
@@ -1693,7 +1691,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
 
   // If this is the outermost JS call, set js_entry_sp value.
   Label non_outermost_js;
-  ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate);
+  ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate());
   __ mov(r5, Operand(ExternalReference(js_entry_sp)));
   __ ldr(r6, MemOperand(r5));
   __ cmp(r6, Operand::Zero());
@@ -1723,7 +1721,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
     // fp will be invalid because the PushTryHandler below sets it to 0 to
     // signal the existence of the JSEntry frame.
     __ mov(ip, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
-                                         isolate)));
+                                         isolate())));
   }
   __ str(r0, MemOperand(ip));
   __ LoadRoot(r0, Heap::kExceptionRootIndex);
@@ -1740,9 +1738,9 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
   // saved values before returning a failure to C.
 
   // Clear any pending exceptions.
-  __ mov(r5, Operand(isolate->factory()->the_hole_value()));
+  __ mov(r5, Operand(isolate()->factory()->the_hole_value()));
   __ mov(ip, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
-                                       isolate)));
+                                       isolate())));
   __ str(r5, MemOperand(ip));
 
   // Invoke the function by calling through JS entry trampoline builtin.
@@ -1757,10 +1755,10 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
   // r4: argv
   if (is_construct) {
     ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
-                                      isolate);
+                                      isolate());
     __ mov(ip, Operand(construct_entry));
   } else {
-    ExternalReference entry(Builtins::kJSEntryTrampoline, isolate);
+    ExternalReference entry(Builtins::kJSEntryTrampoline, isolate());
     __ mov(ip, Operand(entry));
   }
   __ ldr(ip, MemOperand(ip));  // deref address
@@ -1786,7 +1784,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
   // Restore the top frame descriptors from the stack.
   __ pop(r3);
   __ mov(ip,
-         Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate)));
+         Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
   __ str(r3, MemOperand(ip));
 
   // Reset the stack to the callee saved registers.
@@ -1945,7 +1943,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
   __ b(ne, &slow);
 
   // Null is not instance of anything.
-  __ cmp(scratch, Operand(masm->isolate()->factory()->null_value()));
+  __ cmp(scratch, Operand(isolate()->factory()->null_value()));
   __ b(ne, &object_not_null);
   __ mov(r0, Operand(Smi::FromInt(1)));
   __ Ret(HasArgsInRegisters() ? 0 : 2);
@@ -1992,7 +1990,7 @@ void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
     //  -- r0    : key
     //  -- r1    : receiver
     // -----------------------------------
-    __ cmp(r0, Operand(masm->isolate()->factory()->prototype_string()));
+    __ cmp(r0, Operand(isolate()->factory()->prototype_string()));
     __ b(ne, &miss);
     receiver = r1;
   } else {
@@ -2422,11 +2420,10 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
   Register last_match_info_elements = no_reg;  // will be r6;
 
   // Ensure that a RegExp stack is allocated.
-  Isolate* isolate = masm->isolate();
   ExternalReference address_of_regexp_stack_memory_address =
-      ExternalReference::address_of_regexp_stack_memory_address(isolate);
+      ExternalReference::address_of_regexp_stack_memory_address(isolate());
   ExternalReference address_of_regexp_stack_memory_size =
-      ExternalReference::address_of_regexp_stack_memory_size(isolate);
+      ExternalReference::address_of_regexp_stack_memory_size(isolate());
   __ mov(r0, Operand(address_of_regexp_stack_memory_size));
   __ ldr(r0, MemOperand(r0, 0));
   __ cmp(r0, Operand::Zero());
@@ -2568,7 +2565,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
   // subject: Subject string
   // regexp_data: RegExp data (FixedArray)
   // All checks done. Now push arguments for native regexp code.
-  __ IncrementCounter(isolate->counters()->regexp_entry_native(), 1, r0, r2);
+  __ IncrementCounter(isolate()->counters()->regexp_entry_native(), 1, r0, r2);
 
   // Isolates: note we add an additional parameter here (isolate pointer).
   const int kRegExpExecuteArguments = 9;
@@ -2579,7 +2576,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
   // Arguments are before that on the stack or in registers.
 
   // Argument 9 (sp[20]): Pass current isolate address.
-  __ mov(r0, Operand(ExternalReference::isolate_address(isolate)));
+  __ mov(r0, Operand(ExternalReference::isolate_address(isolate())));
   __ str(r0, MemOperand(sp, 5 * kPointerSize));
 
   // Argument 8 (sp[16]): Indicate that this is a direct call from JavaScript.
@@ -2601,7 +2598,8 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
 
   // Argument 5 (sp[4]): static offsets vector buffer.
   __ mov(r0,
-         Operand(ExternalReference::address_of_static_offsets_vector(isolate)));
+         Operand(ExternalReference::address_of_static_offsets_vector(
+             isolate())));
   __ str(r0, MemOperand(sp, 1 * kPointerSize));
 
   // For arguments 4 and 3 get string length, calculate start of string data and
@@ -2632,7 +2630,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
 
   // Locate the code entry and call it.
   __ add(r6, r6, Operand(Code::kHeaderSize - kHeapObjectTag));
-  DirectCEntryStub stub;
+  DirectCEntryStub stub(isolate());
   stub.GenerateCall(masm, r6);
 
   __ LeaveExitFrame(false, no_reg, true);
@@ -2659,9 +2657,9 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
   // stack overflow (on the backtrack stack) was detected in RegExp code but
   // haven't created the exception yet. Handle that in the runtime system.
   // TODO(592): Rerunning the RegExp to get the stack overflow exception.
-  __ mov(r1, Operand(isolate->factory()->the_hole_value()));
+  __ mov(r1, Operand(isolate()->factory()->the_hole_value()));
   __ mov(r2, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
-                                       isolate)));
+                                       isolate())));
   __ ldr(r0, MemOperand(r2, 0));
   __ cmp(r0, r1);
   __ b(eq, &runtime);
@@ -2681,7 +2679,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
 
   __ bind(&failure);
   // For failure and exception return null.
-  __ mov(r0, Operand(masm->isolate()->factory()->null_value()));
+  __ mov(r0, Operand(isolate()->factory()->null_value()));
   __ add(sp, sp, Operand(4 * kPointerSize));
   __ Ret();
 
@@ -2743,7 +2741,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
 
   // Get the static offsets vector filled by the native regexp code.
   ExternalReference address_of_static_offsets_vector =
-      ExternalReference::address_of_static_offsets_vector(isolate);
+      ExternalReference::address_of_static_offsets_vector(isolate());
   __ mov(r2, Operand(address_of_static_offsets_vector));
 
   // r1: number of capture registers
@@ -2888,7 +2886,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
       __ SmiTag(r0);
       __ Push(r3, r2, r1, r0);
 
-      CreateAllocationSiteStub create_stub;
+      CreateAllocationSiteStub create_stub(masm->isolate());
       __ CallStub(&create_stub);
 
       __ Pop(r3, r2, r1, r0);
@@ -2977,8 +2975,8 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
       // If there is a call target cache, mark it megamorphic in the
       // non-function case.  MegamorphicSentinel is an immortal immovable
       // object (megamorphic symbol) so no write barrier is needed.
-      ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()),
-                masm->isolate()->heap()->megamorphic_symbol());
+      ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(isolate()),
+                isolate()->heap()->megamorphic_symbol());
       __ add(r5, r2, Operand::PointerOffsetFromSmiKey(r3));
       __ LoadRoot(ip, Heap::kMegamorphicSymbolRootIndex);
       __ str(ip, FieldMemOperand(r5, FixedArray::kHeaderSize));
@@ -2992,7 +2990,7 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
     __ GetBuiltinFunction(r1, Builtins::CALL_FUNCTION_PROXY);
     {
       Handle<Code> adaptor =
-        masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
+        isolate()->builtins()->ArgumentsAdaptorTrampoline();
       __ Jump(adaptor, RelocInfo::CODE_TARGET);
     }
 
@@ -3003,7 +3001,7 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
     __ mov(r0, Operand(argc_));  // Set up the number of arguments.
     __ mov(r2, Operand::Zero());
     __ GetBuiltinFunction(r1, Builtins::CALL_NON_FUNCTION);
-    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
+    __ Jump(isolate()->builtins()->ArgumentsAdaptorTrampoline(),
             RelocInfo::CODE_TARGET);
   }
 
@@ -3080,7 +3078,7 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
   __ bind(&do_call);
   // Set expected number of arguments to zero (not changing r0).
   __ mov(r2, Operand::Zero());
-  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
+  __ Jump(isolate()->builtins()->ArgumentsAdaptorTrampoline(),
           RelocInfo::CODE_TARGET);
 }
 
@@ -3608,7 +3606,7 @@ void SubStringStub::Generate(MacroAssembler* masm) {
       masm, r1, r5, r2, r3, r4, r6, r9, DEST_ALWAYS_ALIGNED);
 
   __ bind(&return_r0);
-  Counters* counters = masm->isolate()->counters();
+  Counters* counters = isolate()->counters();
   __ IncrementCounter(counters->sub_string_native(), 1, r3, r4);
   __ Drop(3);
   __ Ret();
@@ -3743,7 +3741,7 @@ void StringCompareStub::GenerateAsciiCharsCompareLoop(
 void StringCompareStub::Generate(MacroAssembler* masm) {
   Label runtime;
 
-  Counters* counters = masm->isolate()->counters();
+  Counters* counters = isolate()->counters();
 
   // Stack frame on entry.
   //  sp[0]: right string
@@ -3783,12 +3781,11 @@ void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
   //  -- r0    : right
   //  -- lr    : return address
   // -----------------------------------
-  Isolate* isolate = masm->isolate();
 
   // Load r2 with the allocation site.  We stick an undefined dummy value here
   // and replace it with the real allocation site later when we instantiate this
   // stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
-  __ Move(r2, handle(isolate->heap()->undefined_value()));
+  __ Move(r2, handle(isolate()->heap()->undefined_value()));
 
   // Make sure that we actually patched the allocation site.
   if (FLAG_debug_code) {
@@ -3804,7 +3801,7 @@ void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
 
   // Tail call into the stub that handles binary operations with allocation
   // sites.
-  BinaryOpWithAllocationSiteStub stub(state_);
+  BinaryOpWithAllocationSiteStub stub(isolate(), state_);
   __ TailCallStub(&stub);
 }
 
@@ -3882,9 +3879,9 @@ void ICCompareStub::GenerateNumbers(MacroAssembler* masm) {
 
   __ bind(&unordered);
   __ bind(&generic_stub);
-  ICCompareStub stub(op_, CompareIC::GENERIC, CompareIC::GENERIC,
+  ICCompareStub stub(isolate(), op_, CompareIC::GENERIC, CompareIC::GENERIC,
                      CompareIC::GENERIC);
-  __ Jump(stub.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
+  __ Jump(stub.GetCode(isolate()), RelocInfo::CODE_TARGET);
 
   __ bind(&maybe_undefined1);
   if (Token::IsOrderedRelationalCompareOp(op_)) {
@@ -4107,7 +4104,7 @@ void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
   {
     // Call the runtime system in a fresh internal frame.
     ExternalReference miss =
-        ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate());
+        ExternalReference(IC_Utility(IC::kCompareIC_Miss), isolate());
 
     FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
     __ Push(r1, r0);
@@ -4139,7 +4136,7 @@ void DirectCEntryStub::Generate(MacroAssembler* masm) {
 void DirectCEntryStub::GenerateCall(MacroAssembler* masm,
                                     Register target) {
   intptr_t code =
-      reinterpret_cast<intptr_t>(GetCode(masm->isolate()).location());
+      reinterpret_cast<intptr_t>(GetCode(isolate()).location());
   __ Move(ip, target);
   __ mov(lr, Operand(code, RelocInfo::CODE_TARGET));
   __ blx(lr);  // Call the stub.
@@ -4215,7 +4212,7 @@ void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
   __ stm(db_w, sp, spill_mask);
   __ ldr(r0, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
   __ mov(r1, Operand(Handle<Name>(name)));
-  NameDictionaryLookupStub stub(NEGATIVE_LOOKUP);
+  NameDictionaryLookupStub stub(masm->isolate(), NEGATIVE_LOOKUP);
   __ CallStub(&stub);
   __ cmp(r0, Operand::Zero());
   __ ldm(ia_w, sp, spill_mask);
@@ -4291,7 +4288,7 @@ void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
     __ Move(r0, elements);
     __ Move(r1, name);
   }
-  NameDictionaryLookupStub stub(POSITIVE_LOOKUP);
+  NameDictionaryLookupStub stub(masm->isolate(), POSITIVE_LOOKUP);
   __ CallStub(&stub);
   __ cmp(r0, Operand::Zero());
   __ mov(scratch2, Operand(r2));
@@ -4395,10 +4392,10 @@ void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
 
 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
     Isolate* isolate) {
-  StoreBufferOverflowStub stub1(kDontSaveFPRegs);
+  StoreBufferOverflowStub stub1(isolate, kDontSaveFPRegs);
   stub1.GetCode(isolate);
   // Hydrogen code stubs need stub2 at snapshot time.
-  StoreBufferOverflowStub stub2(kSaveFPRegs);
+  StoreBufferOverflowStub stub2(isolate, kSaveFPRegs);
   stub2.GetCode(isolate);
 }
 
@@ -4504,12 +4501,11 @@ void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
   __ Move(address, regs_.address());
   __ Move(r0, regs_.object());
   __ Move(r1, address);
-  __ mov(r2, Operand(ExternalReference::isolate_address(masm->isolate())));
+  __ mov(r2, Operand(ExternalReference::isolate_address(isolate())));
 
   AllowExternalCallThatCantCauseGC scope(masm);
   __ CallCFunction(
-      ExternalReference::incremental_marking_record_write_function(
-          masm->isolate()),
+      ExternalReference::incremental_marking_record_write_function(isolate()),
       argument_count);
   regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_);
 }
@@ -4664,8 +4660,8 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
 
 
 void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
-  CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
-  __ Call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
+  CEntryStub ces(isolate(), 1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
+  __ Call(ces.GetCode(isolate()), RelocInfo::CODE_TARGET);
   int parameter_count_offset =
       StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
   __ ldr(r1, MemOperand(fp, parameter_count_offset));
@@ -4681,7 +4677,7 @@ void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
 
 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
   if (masm->isolate()->function_entry_hook() != NULL) {
-    ProfileEntryHookStub stub;
+    ProfileEntryHookStub stub(masm->isolate());
     int code_size = masm->CallStubSize(&stub) + 2 * Assembler::kInstrSize;
     PredictableCodeSizeScope predictable(masm, code_size);
     __ push(lr);
@@ -4729,18 +4725,18 @@ void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
 
 #if V8_HOST_ARCH_ARM
   int32_t entry_hook =
-      reinterpret_cast<int32_t>(masm->isolate()->function_entry_hook());
+      reinterpret_cast<int32_t>(isolate()->function_entry_hook());
   __ mov(ip, Operand(entry_hook));
 #else
   // Under the simulator we need to indirect the entry hook through a
   // trampoline function at a known address.
   // It additionally takes an isolate as a third parameter
-  __ mov(r2, Operand(ExternalReference::isolate_address(masm->isolate())));
+  __ mov(r2, Operand(ExternalReference::isolate_address(isolate())));
 
   ApiFunction dispatcher(FUNCTION_ADDR(EntryHookTrampoline));
   __ mov(ip, Operand(ExternalReference(&dispatcher,
                                        ExternalReference::BUILTIN_CALL,
-                                       masm->isolate())));
+                                       isolate())));
 #endif
   __ Call(ip);
 
@@ -4758,7 +4754,7 @@ template<class T>
 static void CreateArrayDispatch(MacroAssembler* masm,
                                 AllocationSiteOverrideMode mode) {
   if (mode == DISABLE_ALLOCATION_SITES) {
-    T stub(GetInitialFastElementsKind(), mode);
+    T stub(masm->isolate(), GetInitialFastElementsKind(), mode);
     __ TailCallStub(&stub);
   } else if (mode == DONT_OVERRIDE) {
     int last_index = GetSequenceIndexFromFastElementsKind(
@@ -4766,7 +4762,7 @@ static void CreateArrayDispatch(MacroAssembler* masm,
     for (int i = 0; i <= last_index; ++i) {
       ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
       __ cmp(r3, Operand(kind));
-      T stub(kind);
+      T stub(masm->isolate(), kind);
       __ TailCallStub(&stub, eq);
     }
 
@@ -4808,12 +4804,14 @@ static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
     ElementsKind initial = GetInitialFastElementsKind();
     ElementsKind holey_initial = GetHoleyElementsKind(initial);
 
-    ArraySingleArgumentConstructorStub stub_holey(holey_initial,
+    ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
+                                                  holey_initial,
                                                   DISABLE_ALLOCATION_SITES);
     __ TailCallStub(&stub_holey);
 
     __ bind(&normal_sequence);
-    ArraySingleArgumentConstructorStub stub(initial,
+    ArraySingleArgumentConstructorStub stub(masm->isolate(),
+                                            initial,
                                             DISABLE_ALLOCATION_SITES);
     __ TailCallStub(&stub);
   } else if (mode == DONT_OVERRIDE) {
@@ -4841,7 +4839,7 @@ static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
     for (int i = 0; i <= last_index; ++i) {
       ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
       __ cmp(r3, Operand(kind));
-      ArraySingleArgumentConstructorStub stub(kind);
+      ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
       __ TailCallStub(&stub, eq);
     }
 
@@ -4859,10 +4857,10 @@ static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
       TERMINAL_FAST_ELEMENTS_KIND);
   for (int i = 0; i <= to_index; ++i) {
     ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
-    T stub(kind);
+    T stub(isolate, kind);
     stub.GetCode(isolate);
     if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) {
-      T stub1(kind, DISABLE_ALLOCATION_SITES);
+      T stub1(isolate, kind, DISABLE_ALLOCATION_SITES);
       stub1.GetCode(isolate);
     }
   }
@@ -4884,11 +4882,11 @@ void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime(
   ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
   for (int i = 0; i < 2; i++) {
     // For internal arrays we only need a few things
-    InternalArrayNoArgumentConstructorStub stubh1(kinds[i]);
+    InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]);
     stubh1.GetCode(isolate);
-    InternalArraySingleArgumentConstructorStub stubh2(kinds[i]);
+    InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
     stubh2.GetCode(isolate);
-    InternalArrayNArgumentsConstructorStub stubh3(kinds[i]);
+    InternalArrayNArgumentsConstructorStub stubh3(isolate, kinds[i]);
     stubh3.GetCode(isolate);
   }
 }
@@ -4967,10 +4965,10 @@ void InternalArrayConstructorStub::GenerateCase(
     MacroAssembler* masm, ElementsKind kind) {
   __ cmp(r0, Operand(1));
 
-  InternalArrayNoArgumentConstructorStub stub0(kind);
+  InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
   __ TailCallStub(&stub0, lo);
 
-  InternalArrayNArgumentsConstructorStub stubN(kind);
+  InternalArrayNArgumentsConstructorStub stubN(isolate(), kind);
   __ TailCallStub(&stubN, hi);
 
   if (IsFastPackedElementsKind(kind)) {
@@ -4980,11 +4978,11 @@ void InternalArrayConstructorStub::GenerateCase(
     __ cmp(r3, Operand::Zero());
 
     InternalArraySingleArgumentConstructorStub
-        stub1_holey(GetHoleyElementsKind(kind));
+        stub1_holey(isolate(), GetHoleyElementsKind(kind));
     __ TailCallStub(&stub1_holey, ne);
   }
 
-  InternalArraySingleArgumentConstructorStub stub1(kind);
+  InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
   __ TailCallStub(&stub1);
 }
 
@@ -5073,8 +5071,6 @@ void CallApiFunctionStub::Generate(MacroAssembler* masm) {
   STATIC_ASSERT(FCA::kHolderIndex == 0);
   STATIC_ASSERT(FCA::kArgsLength == 7);
 
-  Isolate* isolate = masm->isolate();
-
   // context save
   __ push(context);
   // load context from callee
@@ -5096,7 +5092,7 @@ void CallApiFunctionStub::Generate(MacroAssembler* masm) {
   __ push(scratch);
   // isolate
   __ mov(scratch,
-         Operand(ExternalReference::isolate_address(isolate)));
+         Operand(ExternalReference::isolate_address(isolate())));
   __ push(scratch);
   // holder
   __ push(holder);
@@ -5132,7 +5128,7 @@ void CallApiFunctionStub::Generate(MacroAssembler* masm) {
   ExternalReference::Type thunk_type = ExternalReference::PROFILING_API_CALL;
   ApiFunction thunk_fun(thunk_address);
   ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type,
-      masm->isolate());
+      isolate());
 
   AllowExternalCallThatCantCauseGC scope(masm);
   MemOperand context_restore_operand(
@@ -5183,7 +5179,7 @@ void CallApiGetterStub::Generate(MacroAssembler* masm) {
       ExternalReference::PROFILING_GETTER_CALL;
   ApiFunction thunk_fun(thunk_address);
   ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type,
-      masm->isolate());
+      isolate());
   __ CallApiFunctionAndReturn(api_function_address,
                               thunk_ref,
                               kStackUnwindSpace,
diff --git a/src/arm/code-stubs-arm.h b/src/arm/code-stubs-arm.h
index ef78802bef0fb33da7c90da330690868e4cd32c3..5e5ef7b8d44d9ffe8491406e0b8f93acc690e4f8 100644 (file)
@@ -39,8 +39,8 @@ void ArrayNativeCode(MacroAssembler* masm, Label* call_generic_code);
 
 class StoreBufferOverflowStub: public PlatformCodeStub {
  public:
-  explicit StoreBufferOverflowStub(SaveFPRegsMode save_fp)
-      : save_doubles_(save_fp) {}
+  StoreBufferOverflowStub(Isolate* isolate, SaveFPRegsMode save_fp)
+      : PlatformCodeStub(isolate), save_doubles_(save_fp) {}
 
   void Generate(MacroAssembler* masm);
 
@@ -91,7 +91,7 @@ class StringHelper : public AllStatic {
 
 class SubStringStub: public PlatformCodeStub {
  public:
-  SubStringStub() {}
+  explicit SubStringStub(Isolate* isolate) : PlatformCodeStub(isolate) {}
 
  private:
   Major MajorKey() { return SubString; }
@@ -104,7 +104,7 @@ class SubStringStub: public PlatformCodeStub {
 
 class StringCompareStub: public PlatformCodeStub {
  public:
-  StringCompareStub() { }
+  explicit StringCompareStub(Isolate* isolate) : PlatformCodeStub(isolate) { }
 
   // Compares two flat ASCII strings and returns result in r0.
   static void GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
@@ -144,10 +144,12 @@ class StringCompareStub: public PlatformCodeStub {
 // so you don't have to set up the frame.
 class WriteInt32ToHeapNumberStub : public PlatformCodeStub {
  public:
-  WriteInt32ToHeapNumberStub(Register the_int,
+  WriteInt32ToHeapNumberStub(Isolate* isolate,
+                             Register the_int,
                              Register the_heap_number,
                              Register scratch)
-      : the_int_(the_int),
+      : PlatformCodeStub(isolate),
+        the_int_(the_int),
         the_heap_number_(the_heap_number),
         scratch_(scratch) { }
 
@@ -177,12 +179,14 @@ class WriteInt32ToHeapNumberStub : public PlatformCodeStub {
 
 class RecordWriteStub: public PlatformCodeStub {
  public:
-  RecordWriteStub(Register object,
+  RecordWriteStub(Isolate* isolate,
+                  Register object,
                   Register value,
                   Register address,
                   RememberedSetAction remembered_set_action,
                   SaveFPRegsMode fp_mode)
-      : object_(object),
+      : PlatformCodeStub(isolate),
+        object_(object),
         value_(value),
         address_(address),
         remembered_set_action_(remembered_set_action),
@@ -363,7 +367,7 @@ class RecordWriteStub: public PlatformCodeStub {
 // moved by GC
 class DirectCEntryStub: public PlatformCodeStub {
  public:
-  DirectCEntryStub() {}
+  explicit DirectCEntryStub(Isolate* isolate) : PlatformCodeStub(isolate) {}
   void Generate(MacroAssembler* masm);
   void GenerateCall(MacroAssembler* masm, Register target);
 
@@ -379,7 +383,8 @@ class NameDictionaryLookupStub: public PlatformCodeStub {
  public:
   enum LookupMode { POSITIVE_LOOKUP, NEGATIVE_LOOKUP };
 
-  explicit NameDictionaryLookupStub(LookupMode mode) : mode_(mode) { }
+  NameDictionaryLookupStub(Isolate* isolate, LookupMode mode)
+      : PlatformCodeStub(isolate), mode_(mode) { }
 
   void Generate(MacroAssembler* masm);
 
diff --git a/src/arm/debug-arm.cc b/src/arm/debug-arm.cc
index 12258ccad916c139b795630a400d434d4a93bba3..7e4d7ded6ab65608a0feb071ef3970ae4a0deb7b 100644 (file)
@@ -146,7 +146,7 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm,
     __ mov(r0, Operand::Zero());  // no arguments
     __ mov(r1, Operand(ExternalReference::debug_break(masm->isolate())));
 
-    CEntryStub ceb(1);
+    CEntryStub ceb(masm->isolate(), 1);
     __ CallStub(&ceb);
 
     // Restore the register values from the expression stack.
diff --git a/src/arm/full-codegen-arm.cc b/src/arm/full-codegen-arm.cc
index 3679a6f7310d01e029eb6ef43a158c1ce475e6cf..002f7d5f3c8acf35382190ef733fea4fe8eab56c 100644 (file)
@@ -242,7 +242,7 @@ void FullCodeGenerator::Generate() {
       __ Push(info->scope()->GetScopeInfo());
       __ CallRuntime(Runtime::kHiddenNewGlobalContext, 2);
     } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
-      FastNewContextStub stub(heap_slots);
+      FastNewContextStub stub(isolate(), heap_slots);
       __ CallStub(&stub);
     } else {
       __ push(r1);
@@ -303,7 +303,7 @@ void FullCodeGenerator::Generate() {
     } else {
       type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
     }
-    ArgumentsAccessStub stub(type);
+    ArgumentsAccessStub stub(isolate(), type);
     __ CallStub(&stub);
 
     SetVar(arguments, r0, r1, r2);
@@ -1357,7 +1357,9 @@ void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
       !pretenure &&
       scope()->is_function_scope() &&
       info->num_literals() == 0) {
-    FastNewClosureStub stub(info->strict_mode(), info->is_generator());
+    FastNewClosureStub stub(isolate(),
+                            info->strict_mode(),
+                            info->is_generator());
     __ mov(r2, Operand(info));
     __ CallStub(&stub);
   } else {
@@ -1683,7 +1685,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
     __ Push(r3, r2, r1, r0);
     __ CallRuntime(Runtime::kHiddenCreateObjectLiteral, 4);
   } else {
-    FastCloneShallowObjectStub stub(properties_count);
+    FastCloneShallowObjectStub stub(isolate(), properties_count);
     __ CallStub(&stub);
   }
 
@@ -1822,6 +1824,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
   if (has_fast_elements && constant_elements_values->map() ==
       isolate()->heap()->fixed_cow_array_map()) {
     FastCloneShallowArrayStub stub(
+        isolate(),
         FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
         allocation_site_mode,
         length);
@@ -1843,7 +1846,8 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
       mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
     }
 
-    FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
+    FastCloneShallowArrayStub stub(isolate(), mode, allocation_site_mode,
+                                   length);
     __ CallStub(&stub);
   }
 
@@ -1875,7 +1879,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
                           EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
     } else {
       __ mov(r3, Operand(Smi::FromInt(i)));
-      StoreArrayLiteralElementStub stub;
+      StoreArrayLiteralElementStub stub(isolate());
       __ CallStub(&stub);
     }
 
@@ -2120,7 +2124,7 @@ void FullCodeGenerator::VisitYield(Yield* expr) {
       CallIC(ic, TypeFeedbackId::None());
       __ mov(r1, r0);
       __ str(r1, MemOperand(sp, 2 * kPointerSize));
-      CallFunctionStub stub(1, CALL_AS_METHOD);
+      CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
       __ CallStub(&stub);
 
       __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
@@ -2349,7 +2353,7 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
   patch_site.EmitJumpIfSmi(scratch1, &smi_case);
 
   __ bind(&stub_call);
-  BinaryOpICStub stub(op, mode);
+  BinaryOpICStub stub(isolate(), op, mode);
   CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
   patch_site.EmitPatchInfo();
   __ jmp(&done);
@@ -2425,7 +2429,7 @@ void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
                                      Token::Value op,
                                      OverwriteMode mode) {
   __ pop(r1);
-  BinaryOpICStub stub(op, mode);
+  BinaryOpICStub stub(isolate(), op, mode);
   JumpPatchSite patch_site(masm_);    // unbound, signals no inlined smi code.
   CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
   patch_site.EmitPatchInfo();
@@ -2672,7 +2676,7 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr) {
 
   // Record source position for debugger.
   SetSourcePosition(expr->position());
-  CallFunctionStub stub(arg_count, flags);
+  CallFunctionStub stub(isolate(), arg_count, flags);
   __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
   __ CallStub(&stub);
 
@@ -2714,7 +2718,7 @@ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
 
   // Record source position for debugger.
   SetSourcePosition(expr->position());
-  CallFunctionStub stub(arg_count, CALL_AS_METHOD);
+  CallFunctionStub stub(isolate(), arg_count, CALL_AS_METHOD);
   __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
   __ CallStub(&stub);
 
@@ -2745,7 +2749,7 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr) {
   __ mov(r3, Operand(Smi::FromInt(expr->CallFeedbackSlot())));
 
   // Record call targets in unoptimized code.
-  CallFunctionStub stub(arg_count, RECORD_CALL_TARGET);
+  CallFunctionStub stub(isolate(), arg_count, RECORD_CALL_TARGET);
   __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
   __ CallStub(&stub);
   RecordJSReturnSite(expr);
@@ -2822,7 +2826,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
 
     // Record source position for debugger.
     SetSourcePosition(expr->position());
-    CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS);
+    CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
     __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
     __ CallStub(&stub);
     RecordJSReturnSite(expr);
@@ -2940,7 +2944,7 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
   __ Move(r2, FeedbackVector());
   __ mov(r3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot())));
 
-  CallConstructStub stub(RECORD_CALL_TARGET);
+  CallConstructStub stub(isolate(), RECORD_CALL_TARGET);
   __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL);
   PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
   context()->Plug(r0);
@@ -3311,7 +3315,7 @@ void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
   VisitForAccumulatorValue(args->at(0));
   __ mov(r1, r0);
   __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
-  ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
+  ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
   __ CallStub(&stub);
   context()->Plug(r0);
 }
@@ -3421,7 +3425,7 @@ void FullCodeGenerator::EmitLog(CallRuntime* expr) {
 
 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
   // Load the arguments on the stack and call the stub.
-  SubStringStub stub;
+  SubStringStub stub(isolate());
   ZoneList<Expression*>* args = expr->arguments();
   ASSERT(args->length() == 3);
   VisitForStackValue(args->at(0));
@@ -3434,7 +3438,7 @@ void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
 
 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
   // Load the arguments on the stack and call the stub.
-  RegExpExecStub stub;
+  RegExpExecStub stub(isolate());
   ZoneList<Expression*>* args = expr->arguments();
   ASSERT(args->length() == 4);
   VisitForStackValue(args->at(0));
@@ -3584,7 +3588,7 @@ void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
   ASSERT(args->length() == 2);
   VisitForStackValue(args->at(0));
   VisitForStackValue(args->at(1));
-  MathPowStub stub(MathPowStub::ON_STACK);
+  MathPowStub stub(isolate(), MathPowStub::ON_STACK);
   __ CallStub(&stub);
   context()->Plug(r0);
 }
@@ -3624,7 +3628,7 @@ void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
   // Load the argument into r0 and call the stub.
   VisitForAccumulatorValue(args->at(0));
 
-  NumberToStringStub stub;
+  NumberToStringStub stub(isolate());
   __ CallStub(&stub);
   context()->Plug(r0);
 }
@@ -3747,7 +3751,7 @@ void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
   VisitForAccumulatorValue(args->at(1));
 
   __ pop(r1);
-  StringAddStub stub(STRING_ADD_CHECK_BOTH, NOT_TENURED);
+  StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
   __ CallStub(&stub);
   context()->Plug(r0);
 }
@@ -3759,7 +3763,7 @@ void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
   VisitForStackValue(args->at(0));
   VisitForStackValue(args->at(1));
 
-  StringCompareStub stub;
+  StringCompareStub stub(isolate());
   __ CallStub(&stub);
   context()->Plug(r0);
 }
@@ -3798,7 +3802,7 @@ void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
 
 
 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
-  RegExpConstructResultStub stub;
+  RegExpConstructResultStub stub(isolate());
   ZoneList<Expression*>* args = expr->arguments();
   ASSERT(args->length() == 3);
   VisitForStackValue(args->at(0));
@@ -4164,7 +4168,7 @@ void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
 
     // Record source position of the IC call.
     SetSourcePosition(expr->position());
-    CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS);
+    CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
     __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
     __ CallStub(&stub);
 
@@ -4382,7 +4386,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
     __ jmp(&stub_call);
     __ bind(&slow);
   }
-  ToNumberStub convert_stub;
+  ToNumberStub convert_stub(isolate());
   __ CallStub(&convert_stub);
 
   // Save result for postfix expressions.
@@ -4413,7 +4417,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
   // Record position before stub call.
   SetSourcePosition(expr->position());
 
-  BinaryOpICStub stub(Token::ADD, NO_OVERWRITE);
+  BinaryOpICStub stub(isolate(), Token::ADD, NO_OVERWRITE);
   CallIC(stub.GetCode(isolate()), expr->CountBinOpFeedbackId());
   patch_site.EmitPatchInfo();
   __ bind(&done);
@@ -4623,7 +4627,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
 
     case Token::INSTANCEOF: {
       VisitForStackValue(expr->right());
-      InstanceofStub stub(InstanceofStub::kNoFlags);
+      InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
       __ CallStub(&stub);
       PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
       // The stub returns 0 for true.
diff --git a/src/arm/lithium-codegen-arm.cc b/src/arm/lithium-codegen-arm.cc
index ee695e4f6d47893f9a677533ad137ba8a01b9784..11013e74ff65b9eb7b153163bb61b71415cecfe3 100644 (file)
@@ -207,7 +207,7 @@ bool LCodeGen::GeneratePrologue() {
     Comment(";;; Allocate local context");
     // Argument to NewContext is the function, which is in r1.
     if (heap_slots <= FastNewContextStub::kMaximumSlots) {
-      FastNewContextStub stub(heap_slots);
+      FastNewContextStub stub(isolate(), heap_slots);
       __ CallStub(&stub);
     } else {
       __ push(r1);
@@ -1106,17 +1106,17 @@ void LCodeGen::DoCallStub(LCallStub* instr) {
   ASSERT(ToRegister(instr->result()).is(r0));
   switch (instr->hydrogen()->major_key()) {
     case CodeStub::RegExpExec: {
-      RegExpExecStub stub;
+      RegExpExecStub stub(isolate());
       CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
       break;
     }
     case CodeStub::SubString: {
-      SubStringStub stub;
+      SubStringStub stub(isolate());
       CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
       break;
     }
     case CodeStub::StringCompare: {
-      StringCompareStub stub;
+      StringCompareStub stub(isolate());
       CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
       break;
     }
@@ -2159,7 +2159,7 @@ void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
   ASSERT(ToRegister(instr->right()).is(r0));
   ASSERT(ToRegister(instr->result()).is(r0));
 
-  BinaryOpICStub stub(instr->op(), NO_OVERWRITE);
+  BinaryOpICStub stub(isolate(), instr->op(), NO_OVERWRITE);
   // Block literal pool emission to ensure nop indicating no inlined smi code
   // is in the correct position.
   Assembler::BlockConstPoolScope block_const_pool(masm());
@@ -2754,7 +2754,7 @@ void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
   ASSERT(ToRegister(instr->left()).is(r0));  // Object is in r0.
   ASSERT(ToRegister(instr->right()).is(r1));  // Function is in r1.
 
-  InstanceofStub stub(InstanceofStub::kArgsInRegisters);
+  InstanceofStub stub(isolate(), InstanceofStub::kArgsInRegisters);
   CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
 
   __ cmp(r0, Operand::Zero());
@@ -2851,7 +2851,7 @@ void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
       flags | InstanceofStub::kCallSiteInlineCheck);
   flags = static_cast<InstanceofStub::Flags>(
       flags | InstanceofStub::kReturnTrueFalseObject);
-  InstanceofStub stub(flags);
+  InstanceofStub stub(isolate(), flags);
 
   PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
   LoadContextFromDeferred(instr->context());
@@ -3870,7 +3870,7 @@ void LCodeGen::DoPower(LPower* instr) {
   ASSERT(ToDoubleRegister(instr->result()).is(d2));
 
   if (exponent_type.IsSmi()) {
-    MathPowStub stub(MathPowStub::TAGGED);
+    MathPowStub stub(isolate(), MathPowStub::TAGGED);
     __ CallStub(&stub);
   } else if (exponent_type.IsTagged()) {
     Label no_deopt;
@@ -3880,14 +3880,14 @@ void LCodeGen::DoPower(LPower* instr) {
     __ cmp(r6, Operand(ip));
     DeoptimizeIf(ne, instr->environment());
     __ bind(&no_deopt);
-    MathPowStub stub(MathPowStub::TAGGED);
+    MathPowStub stub(isolate(), MathPowStub::TAGGED);
     __ CallStub(&stub);
   } else if (exponent_type.IsInteger32()) {
-    MathPowStub stub(MathPowStub::INTEGER);
+    MathPowStub stub(isolate(), MathPowStub::INTEGER);
     __ CallStub(&stub);
   } else {
     ASSERT(exponent_type.IsDouble());
-    MathPowStub stub(MathPowStub::DOUBLE);
+    MathPowStub stub(isolate(), MathPowStub::DOUBLE);
     __ CallStub(&stub);
   }
 }
@@ -3994,7 +3994,7 @@ void LCodeGen::DoCallFunction(LCallFunction* instr) {
   ASSERT(ToRegister(instr->result()).is(r0));
 
   int arity = instr->arity();
-  CallFunctionStub stub(arity, instr->hydrogen()->function_flags());
+  CallFunctionStub stub(isolate(), arity, instr->hydrogen()->function_flags());
   CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
 }
 
@@ -4007,7 +4007,7 @@ void LCodeGen::DoCallNew(LCallNew* instr) {
   __ mov(r0, Operand(instr->arity()));
   // No cell in r2 for construct type feedback in optimized code
   __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
-  CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
+  CallConstructStub stub(isolate(), NO_CALL_FUNCTION_FLAGS);
   CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
 }
 
@@ -4026,7 +4026,7 @@ void LCodeGen::DoCallNewArray(LCallNewArray* instr) {
           : DONT_OVERRIDE;
 
   if (instr->arity() == 0) {
-    ArrayNoArgumentConstructorStub stub(kind, override_mode);
+    ArrayNoArgumentConstructorStub stub(isolate(), kind, override_mode);
     CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
   } else if (instr->arity() == 1) {
     Label done;
@@ -4039,17 +4039,19 @@ void LCodeGen::DoCallNewArray(LCallNewArray* instr) {
       __ b(eq, &packed_case);
 
       ElementsKind holey_kind = GetHoleyElementsKind(kind);
-      ArraySingleArgumentConstructorStub stub(holey_kind, override_mode);
+      ArraySingleArgumentConstructorStub stub(isolate(),
+                                              holey_kind,
+                                              override_mode);
       CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
       __ jmp(&done);
       __ bind(&packed_case);
     }
 
-    ArraySingleArgumentConstructorStub stub(kind, override_mode);
+    ArraySingleArgumentConstructorStub stub(isolate(), kind, override_mode);
     CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
     __ bind(&done);
   } else {
-    ArrayNArgumentsConstructorStub stub(kind, override_mode);
+    ArrayNArgumentsConstructorStub stub(isolate(), kind, override_mode);
     CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
   }
 }
@@ -4446,7 +4448,7 @@ void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
         this, Safepoint::kWithRegistersAndDoubles);
     __ Move(r1, to_map);
     bool is_js_array = from_map->instance_type() == JS_ARRAY_TYPE;
-    TransitionElementsKindStub stub(from_kind, to_kind, is_js_array);
+    TransitionElementsKindStub stub(isolate(), from_kind, to_kind, is_js_array);
     __ CallStub(&stub);
     RecordSafepointWithRegistersAndDoubles(
         instr->pointer_map(), 0, Safepoint::kLazyDeopt);
@@ -4469,7 +4471,8 @@ void LCodeGen::DoStringAdd(LStringAdd* instr) {
   ASSERT(ToRegister(instr->context()).is(cp));
   ASSERT(ToRegister(instr->left()).is(r1));
   ASSERT(ToRegister(instr->right()).is(r0));
-  StringAddStub stub(instr->hydrogen()->flags(),
+  StringAddStub stub(isolate(),
+                     instr->hydrogen()->flags(),
                      instr->hydrogen()->pretenure_flag());
   CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
 }
@@ -5447,7 +5450,8 @@ void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
   // space for nested functions that don't need literals cloning.
   bool pretenure = instr->hydrogen()->pretenure();
   if (!pretenure && instr->hydrogen()->has_no_literals()) {
-    FastNewClosureStub stub(instr->hydrogen()->strict_mode(),
+    FastNewClosureStub stub(isolate(),
+                            instr->hydrogen()->strict_mode(),
                             instr->hydrogen()->is_generator());
     __ mov(r2, Operand(instr->hydrogen()->shared_info()));
     CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
diff --git a/src/arm/macro-assembler-arm.cc b/src/arm/macro-assembler-arm.cc
index 09ccbc39bd36a4138ebb80fc9663270300affb9e..0234e48ad60f552a6b3fbbf89cdf420daac72aaa 100644 (file)
@@ -556,7 +556,8 @@ void MacroAssembler::RecordWrite(Register object,
   if (lr_status == kLRHasNotBeenSaved) {
     push(lr);
   }
-  RecordWriteStub stub(object, value, address, remembered_set_action, fp_mode);
+  RecordWriteStub stub(isolate(), object, value, address, remembered_set_action,
+                       fp_mode);
   CallStub(&stub);
   if (lr_status == kLRHasNotBeenSaved) {
     pop(lr);
@@ -605,7 +606,7 @@ void MacroAssembler::RememberedSetHelper(Register object,  // For debug tests.
   }
   push(lr);
   StoreBufferOverflowStub store_buffer_overflow =
-      StoreBufferOverflowStub(fp_mode);
+      StoreBufferOverflowStub(isolate(), fp_mode);
   CallStub(&store_buffer_overflow);
   pop(lr);
   bind(&done);
@@ -1339,7 +1340,7 @@ void MacroAssembler::IsObjectNameType(Register object,
 void MacroAssembler::DebugBreak() {
   mov(r0, Operand::Zero());
   mov(r1, Operand(ExternalReference(Runtime::kDebugBreak, isolate())));
-  CEntryStub ces(1);
+  CEntryStub ces(isolate(), 1);
   ASSERT(AllowThisStubCall(&ces));
   Call(ces.GetCode(isolate()), RelocInfo::DEBUG_BREAK);
 }
@@ -2389,7 +2390,7 @@ void MacroAssembler::CallApiFunctionAndReturn(
   // Native call returns to the DirectCEntry stub which redirects to the
   // return address pushed on stack (could have moved after GC).
   // DirectCEntry stub itself is generated early and never moves.
-  DirectCEntryStub stub;
+  DirectCEntryStub stub(isolate());
   stub.GenerateCall(this, r3);
 
   if (FLAG_log_timer_events) {
@@ -2594,7 +2595,7 @@ void MacroAssembler::TruncateDoubleToI(Register result,
   sub(sp, sp, Operand(kDoubleSize));  // Put input on stack.
   vstr(double_input, MemOperand(sp, 0));
 
-  DoubleToIStub stub(sp, result, 0, true, true);
+  DoubleToIStub stub(isolate(), sp, result, 0, true, true);
   CallStub(&stub);
 
   add(sp, sp, Operand(kDoubleSize));
@@ -2616,7 +2617,8 @@ void MacroAssembler::TruncateHeapNumberToI(Register result,
 
   // If we fell through then inline version didn't succeed - call stub instead.
   push(lr);
-  DoubleToIStub stub(object,
+  DoubleToIStub stub(isolate(),
+                     object,
                      result,
                      HeapNumber::kValueOffset - kHeapObjectTag,
                      true,
@@ -2682,7 +2684,7 @@ void MacroAssembler::CallRuntime(const Runtime::Function* f,
   // smarter.
   mov(r0, Operand(num_arguments));
   mov(r1, Operand(ExternalReference(f, isolate())));
-  CEntryStub stub(1, save_doubles);
+  CEntryStub stub(isolate(), 1, save_doubles);
   CallStub(&stub);
 }
 
@@ -2692,7 +2694,7 @@ void MacroAssembler::CallExternalReference(const ExternalReference& ext,
   mov(r0, Operand(num_arguments));
   mov(r1, Operand(ext));
 
-  CEntryStub stub(1);
+  CEntryStub stub(isolate(), 1);
   CallStub(&stub);
 }
 
@@ -2724,7 +2726,7 @@ void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin) {
   ASSERT((reinterpret_cast<intptr_t>(builtin.address()) & 1) == 1);
 #endif
   mov(r1, Operand(builtin));
-  CEntryStub stub(1);
+  CEntryStub stub(isolate(), 1);
   Jump(stub.GetCode(isolate()), RelocInfo::CODE_TARGET);
 }
 
index d61cfc7553b7fad999d925c6a5e9ffb25affde32..2622388c2e4ba4970aa74da5e6ff964c48ddcccf 100644 (file)
@@ -1043,7 +1043,7 @@ void RegExpMacroAssemblerARM::CallCheckStackGuardState(Register scratch) {
   ExternalReference stack_guard_check =
       ExternalReference::re_check_stack_guard_state(isolate());
   __ mov(ip, Operand(stack_guard_check));
-  DirectCEntryStub stub;
+  DirectCEntryStub stub(isolate());
   stub.GenerateCall(masm_, ip);
 
   // Drop the return address from the stack.
index 3d07d56375a09db964ad3b61f4217a1a496bd297..869ebc1a4c840f464c9efc39788ec7243b81bbeb 100644 (file)
@@ -833,7 +833,7 @@ void StubCompiler::GenerateFastApiCall(MacroAssembler* masm,
   __ mov(api_function_address, Operand(ref));
 
   // Jump to stub.
-  CallApiFunctionStub stub(is_store, call_data_undefined, argc);
+  CallApiFunctionStub stub(isolate, is_store, call_data_undefined, argc);
   __ TailCallStub(&stub);
 }
 
@@ -1033,12 +1033,14 @@ void LoadStubCompiler::GenerateLoadField(Register reg,
                                          Representation representation) {
   if (!reg.is(receiver())) __ mov(receiver(), reg);
   if (kind() == Code::LOAD_IC) {
-    LoadFieldStub stub(field.is_inobject(holder),
+    LoadFieldStub stub(isolate(),
+                       field.is_inobject(holder),
                        field.translate(holder),
                        representation);
     GenerateTailCall(masm(), stub.GetCode(isolate()));
   } else {
-    KeyedLoadFieldStub stub(field.is_inobject(holder),
+    KeyedLoadFieldStub stub(isolate(),
+                            field.is_inobject(holder),
                             field.translate(holder),
                             representation);
     GenerateTailCall(masm(), stub.GetCode(isolate()));
@@ -1095,7 +1097,7 @@ void LoadStubCompiler::GenerateLoadCallback(
   ExternalReference ref = ExternalReference(&fun, type, isolate());
   __ mov(getter_address_reg, Operand(ref));
 
-  CallApiGetterStub stub;
+  CallApiGetterStub stub(isolate());
   __ TailCallStub(&stub);
 }
 
index 3fbe67bb563ec232b537415b65dcac3cd1f12d3e..bada60088bb9633cb9d95fb5aaf59ad781f88731 100644 (file)
@@ -785,7 +785,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
       // No type feedback cell is available.
       __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
 
-      CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
+      CallConstructStub stub(masm->isolate(), NO_CALL_FUNCTION_FLAGS);
       __ CallStub(&stub);
     } else {
       ParameterCount actual(x0);
index 127588267d791170d0a88f56e32374f5a94026b0..4ca2beaf22fbef049755774131bcf490aae99c2f 100644 (file)
@@ -534,10 +534,9 @@ void CallDescriptors::InitializeForIsolate(Isolate* isolate) {
 
 void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) {
   // Update the static counter each time a new code stub is generated.
-  Isolate* isolate = masm->isolate();
-  isolate->counters()->code_stubs()->Increment();
+  isolate()->counters()->code_stubs()->Increment();
 
-  CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate);
+  CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate());
   int param_count = descriptor->register_param_count_;
   {
     // Call the runtime system in a fresh internal frame.
@@ -1049,8 +1048,7 @@ void ICCompareStub::GenerateGeneric(MacroAssembler* masm) {
   __ JumpIfBothInstanceTypesAreNotSequentialAscii(lhs_type, rhs_type, x14,
                                                   x15, &slow);
 
-  Isolate* isolate = masm->isolate();
-  __ IncrementCounter(isolate->counters()->string_compare_native(), 1, x10,
+  __ IncrementCounter(isolate()->counters()->string_compare_native(), 1, x10,
                       x11);
   if (cond == eq) {
     StringCompareStub::GenerateFlatAsciiStringEquals(masm, lhs, rhs,
@@ -1112,10 +1110,9 @@ void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
   }
 
   AllowExternalCallThatCantCauseGC scope(masm);
-  __ Mov(x0, ExternalReference::isolate_address(masm->isolate()));
+  __ Mov(x0, ExternalReference::isolate_address(isolate()));
   __ CallCFunction(
-      ExternalReference::store_buffer_overflow_function(masm->isolate()),
-                                                        1, 0);
+      ExternalReference::store_buffer_overflow_function(isolate()), 1, 0);
 
   if (save_doubles_ == kSaveFPRegs) {
     __ PopCPURegList(saved_fp_regs);
@@ -1127,9 +1124,9 @@ void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
 
 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
     Isolate* isolate) {
-  StoreBufferOverflowStub stub1(kDontSaveFPRegs);
+  StoreBufferOverflowStub stub1(isolate, kDontSaveFPRegs);
   stub1.GetCode(isolate);
-  StoreBufferOverflowStub stub2(kSaveFPRegs);
+  StoreBufferOverflowStub stub2(isolate, kSaveFPRegs);
   stub2.GetCode(isolate);
 }
 
@@ -1314,7 +1311,7 @@ void MathPowStub::Generate(MacroAssembler* masm) {
       AllowExternalCallThatCantCauseGC scope(masm);
       __ Mov(saved_lr, lr);
       __ CallCFunction(
-          ExternalReference::power_double_double_function(masm->isolate()),
+          ExternalReference::power_double_double_function(isolate()),
           0, 2);
       __ Mov(lr, saved_lr);
       __ B(&done);
@@ -1397,7 +1394,7 @@ void MathPowStub::Generate(MacroAssembler* masm) {
                           result_double);
     ASSERT(result_tagged.is(x0));
     __ IncrementCounter(
-        masm->isolate()->counters()->math_pow(), 1, scratch0, scratch1);
+        isolate()->counters()->math_pow(), 1, scratch0, scratch1);
     __ Ret();
   } else {
     AllowExternalCallThatCantCauseGC scope(masm);
@@ -1405,12 +1402,12 @@ void MathPowStub::Generate(MacroAssembler* masm) {
     __ Fmov(base_double, base_double_copy);
     __ Scvtf(exponent_double, exponent_integer);
     __ CallCFunction(
-        ExternalReference::power_double_double_function(masm->isolate()),
+        ExternalReference::power_double_double_function(isolate()),
         0, 2);
     __ Mov(lr, saved_lr);
     __ Bind(&done);
     __ IncrementCounter(
-        masm->isolate()->counters()->math_pow(), 1, scratch0, scratch1);
+        isolate()->counters()->math_pow(), 1, scratch0, scratch1);
     __ Ret();
   }
 }
@@ -1434,17 +1431,17 @@ void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
 
 
 void StoreRegistersStateStub::GenerateAheadOfTime(Isolate* isolate) {
-  StoreRegistersStateStub stub1(kDontSaveFPRegs);
+  StoreRegistersStateStub stub1(isolate, kDontSaveFPRegs);
   stub1.GetCode(isolate);
-  StoreRegistersStateStub stub2(kSaveFPRegs);
+  StoreRegistersStateStub stub2(isolate, kSaveFPRegs);
   stub2.GetCode(isolate);
 }
 
 
 void RestoreRegistersStateStub::GenerateAheadOfTime(Isolate* isolate) {
-  RestoreRegistersStateStub stub1(kDontSaveFPRegs);
+  RestoreRegistersStateStub stub1(isolate, kDontSaveFPRegs);
   stub1.GetCode(isolate);
-  RestoreRegistersStateStub stub2(kSaveFPRegs);
+  RestoreRegistersStateStub stub2(isolate, kSaveFPRegs);
   stub2.GetCode(isolate);
 }
 
@@ -1470,9 +1467,9 @@ bool CEntryStub::NeedsImmovableCode() {
 
 
 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
-  CEntryStub stub(1, kDontSaveFPRegs);
+  CEntryStub stub(isolate, 1, kDontSaveFPRegs);
   stub.GetCode(isolate);
-  CEntryStub stub_fp(1, kSaveFPRegs);
+  CEntryStub stub_fp(isolate, 1, kSaveFPRegs);
   stub_fp.GetCode(isolate);
 }
 
@@ -1582,12 +1579,10 @@ void CEntryStub::Generate(MacroAssembler* masm) {
 
   ASSERT(csp.Is(__ StackPointer()));
 
-  Isolate* isolate = masm->isolate();
-
   // Prepare AAPCS64 arguments to pass to the builtin.
   __ Mov(x0, argc);
   __ Mov(x1, argv);
-  __ Mov(x2, ExternalReference::isolate_address(isolate));
+  __ Mov(x2, ExternalReference::isolate_address(isolate()));
 
   Label return_location;
   __ Adr(x12, &return_location);
@@ -1647,14 +1642,14 @@ void CEntryStub::Generate(MacroAssembler* masm) {
 
   // Retrieve the pending exception.
   ExternalReference pending_exception_address(
-      Isolate::kPendingExceptionAddress, isolate);
+      Isolate::kPendingExceptionAddress, isolate());
   const Register& exception = result;
   const Register& exception_address = x11;
   __ Mov(exception_address, Operand(pending_exception_address));
   __ Ldr(exception, MemOperand(exception_address));
 
   // Clear the pending exception.
-  __ Mov(x10, Operand(isolate->factory()->the_hole_value()));
+  __ Mov(x10, Operand(isolate()->factory()->the_hole_value()));
   __ Str(x10, MemOperand(exception_address));
 
   //  x0    exception   The exception descriptor.
@@ -1665,7 +1660,7 @@ void CEntryStub::Generate(MacroAssembler* masm) {
   // Special handling of termination exceptions, which are uncatchable by
   // JavaScript code.
   Label throw_termination_exception;
-  __ Cmp(exception, Operand(isolate->factory()->termination_exception()));
+  __ Cmp(exception, Operand(isolate()->factory()->termination_exception()));
   __ B(eq, &throw_termination_exception);
 
   // We didn't execute a return case, so the stack frame hasn't been updated
@@ -1725,14 +1720,11 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
   __ Fmov(fp_zero, 0.0);
 
   // Build an entry frame (see layout below).
-  Isolate* isolate = masm->isolate();
-
-  // Build an entry frame.
   int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
   int64_t bad_frame_pointer = -1L;  // Bad frame pointer to fail if it is used.
   __ Mov(x13, bad_frame_pointer);
   __ Mov(x12, Smi::FromInt(marker));
-  __ Mov(x11, ExternalReference(Isolate::kCEntryFPAddress, isolate));
+  __ Mov(x11, ExternalReference(Isolate::kCEntryFPAddress, isolate()));
   __ Ldr(x10, MemOperand(x11));
 
   __ Push(x13, xzr, x12, x10);
@@ -1742,7 +1734,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
   // Push the JS entry frame marker. Also set js_entry_sp if this is the
   // outermost JS call.
   Label non_outermost_js, done;
-  ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate);
+  ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate());
   __ Mov(x10, ExternalReference(js_entry_sp));
   __ Ldr(x11, MemOperand(x10));
   __ Cbnz(x11, &non_outermost_js);
@@ -1782,7 +1774,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
     // fp will be invalid because the PushTryHandler below sets it to 0 to
     // signal the existence of the JSEntry frame.
     __ Mov(x10, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
-           isolate)));
+                                          isolate())));
   }
   __ Str(code_entry, MemOperand(x10));
   __ LoadRoot(x0, Heap::kExceptionRootIndex);
@@ -1798,9 +1790,9 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
   // saved values before returning a failure to C.
 
   // Clear any pending exceptions.
-  __ Mov(x10, Operand(isolate->factory()->the_hole_value()));
+  __ Mov(x10, Operand(isolate()->factory()->the_hole_value()));
   __ Mov(x11, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
-                                        isolate)));
+                                        isolate())));
   __ Str(x10, MemOperand(x11));
 
   // Invoke the function by calling through the JS entry trampoline builtin.
@@ -1815,7 +1807,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
   // x4: argv.
   ExternalReference entry(is_construct ? Builtins::kJSConstructEntryTrampoline
                                        : Builtins::kJSEntryTrampoline,
-                          isolate);
+                          isolate());
   __ Mov(x10, entry);
 
   // Call the JSEntryTrampoline.
@@ -1848,7 +1840,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
 
   // Restore the top frame descriptors from the stack.
   __ Pop(x10);
-  __ Mov(x11, ExternalReference(Isolate::kCEntryFPAddress, isolate));
+  __ Mov(x11, ExternalReference(Isolate::kCEntryFPAddress, isolate()));
   __ Str(x10, MemOperand(x11));
 
   // Reset the stack to the callee saved registers.
@@ -1875,7 +1867,7 @@ void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
     // -----------------------------------
     Register key = x0;
     receiver = x1;
-    __ Cmp(key, Operand(masm->isolate()->factory()->prototype_string()));
+    __ Cmp(key, Operand(isolate()->factory()->prototype_string()));
     __ B(ne, &miss);
   } else {
     ASSERT(kind() == Code::LOAD_IC);
@@ -2033,7 +2025,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
   __ Mov(result, res_false);
 
   // Null is not instance of anything.
-  __ Cmp(object_type, Operand(masm->isolate()->factory()->null_value()));
+  __ Cmp(object_type, Operand(isolate()->factory()->null_value()));
   __ B(ne, &object_not_null);
   __ Ret();
 
@@ -2634,11 +2626,10 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
   const int kJSRegExpOffset = 7 * kPointerSize;
 
   // Ensure that a RegExp stack is allocated.
-  Isolate* isolate = masm->isolate();
   ExternalReference address_of_regexp_stack_memory_address =
-      ExternalReference::address_of_regexp_stack_memory_address(isolate);
+      ExternalReference::address_of_regexp_stack_memory_address(isolate());
   ExternalReference address_of_regexp_stack_memory_size =
-      ExternalReference::address_of_regexp_stack_memory_size(isolate);
+      ExternalReference::address_of_regexp_stack_memory_size(isolate());
   __ Mov(x10, address_of_regexp_stack_memory_size);
   __ Ldr(x10, MemOperand(x10));
   __ Cbz(x10, &runtime);
@@ -2800,7 +2791,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
   __ JumpIfSmi(code_object, &runtime);
 
   // All checks done. Now push arguments for native regexp code.
-  __ IncrementCounter(isolate->counters()->regexp_entry_native(), 1,
+  __ IncrementCounter(isolate()->counters()->regexp_entry_native(), 1,
                       x10,
                       x11);
 
@@ -2816,7 +2807,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
   // csp[0]: Space for the return address placed by DirectCEntryStub.
   // csp[8]: Argument 9, the current isolate address.
 
-  __ Mov(x10, ExternalReference::isolate_address(isolate));
+  __ Mov(x10, ExternalReference::isolate_address(isolate()));
   __ Poke(x10, kPointerSize);
 
   Register length = w11;
@@ -2865,7 +2856,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
   __ Add(x3, x2, Operand(w10, UXTW));
 
   // Argument 5 (x4): static offsets vector buffer.
-  __ Mov(x4, ExternalReference::address_of_static_offsets_vector(isolate));
+  __ Mov(x4, ExternalReference::address_of_static_offsets_vector(isolate()));
 
   // Argument 6 (x5): Set the number of capture registers to zero to force
   // global regexps to behave as non-global. This stub is not used for global
@@ -2884,7 +2875,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
 
   // Locate the code entry and call it.
   __ Add(code_object, code_object, Code::kHeaderSize - kHeapObjectTag);
-  DirectCEntryStub stub;
+  DirectCEntryStub stub(isolate());
   stub.GenerateCall(masm, code_object);
 
   __ LeaveExitFrame(false, x10, true);
@@ -2970,7 +2961,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
   // Get the static offsets vector filled by the native regexp code
   // and fill the last match info.
   ExternalReference address_of_static_offsets_vector =
-      ExternalReference::address_of_static_offsets_vector(isolate);
+      ExternalReference::address_of_static_offsets_vector(isolate());
   __ Mov(offsets_vector_index, address_of_static_offsets_vector);
 
   Label next_capture, done;
@@ -3009,10 +3000,10 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
   // A stack overflow (on the backtrack stack) may have occurred
   // in the RegExp code but no exception has been created yet.
   // If there is no pending exception, handle that in the runtime system.
-  __ Mov(x10, Operand(isolate->factory()->the_hole_value()));
+  __ Mov(x10, Operand(isolate()->factory()->the_hole_value()));
   __ Mov(x11,
          Operand(ExternalReference(Isolate::kPendingExceptionAddress,
-                                   isolate)));
+                                   isolate())));
   __ Ldr(exception_value, MemOperand(x11));
   __ Cmp(x10, exception_value);
   __ B(eq, &runtime);
@@ -3031,7 +3022,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
   __ ThrowUncatchable(exception_value, x10, x11, x12, x13);
 
   __ Bind(&failure);
-  __ Mov(x0, Operand(masm->isolate()->factory()->null_value()));
+  __ Mov(x0, Operand(isolate()->factory()->null_value()));
   __ PopCPURegList(used_callee_saved_registers);
   // Drop the 4 arguments of the stub from the stack.
   __ Drop(4);
@@ -3161,7 +3152,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm,
     // slot.
     {
       FrameScope scope(masm, StackFrame::INTERNAL);
-      CreateAllocationSiteStub create_stub;
+      CreateAllocationSiteStub create_stub(masm->isolate());
 
       // Arguments register must be smi-tagged to call out.
       __ SmiTag(argc);
@@ -3265,8 +3256,8 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
       // If there is a call target cache, mark it megamorphic in the
       // non-function case. MegamorphicSentinel is an immortal immovable object
       // (megamorphic symbol) so no write barrier is needed.
-      ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()),
-                masm->isolate()->heap()->megamorphic_symbol());
+      ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(isolate()),
+                isolate()->heap()->megamorphic_symbol());
       __ Add(x12, cache_cell, Operand::UntagSmiAndScale(slot,
                                                         kPointerSizeLog2));
       __ LoadRoot(x11, Heap::kMegamorphicSymbolRootIndex);
@@ -3281,7 +3272,7 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
     __ GetBuiltinFunction(x1, Builtins::CALL_FUNCTION_PROXY);
     {
       Handle<Code> adaptor =
-          masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
+          isolate()->builtins()->ArgumentsAdaptorTrampoline();
       __ Jump(adaptor, RelocInfo::CODE_TARGET);
     }
 
@@ -3292,7 +3283,7 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
     __ Mov(x0, argc_);  // Set up the number of arguments.
     __ Mov(x2, 0);
     __ GetBuiltinFunction(function, Builtins::CALL_NON_FUNCTION);
-    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
+    __ Jump(isolate()->builtins()->ArgumentsAdaptorTrampoline(),
             RelocInfo::CODE_TARGET);
   }
 
@@ -3375,7 +3366,7 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
   __ Bind(&do_call);
   // Set expected number of arguments to zero (not changing x0).
   __ Mov(x2, 0);
-  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
+  __ Jump(isolate()->builtins()->ArgumentsAdaptorTrampoline(),
           RelocInfo::CODE_TARGET);
 }
 
@@ -3565,9 +3556,9 @@ void ICCompareStub::GenerateNumbers(MacroAssembler* masm) {
   __ Ret();
 
   __ Bind(&unordered);
-  ICCompareStub stub(op_, CompareIC::GENERIC, CompareIC::GENERIC,
+  ICCompareStub stub(isolate(), op_, CompareIC::GENERIC, CompareIC::GENERIC,
                      CompareIC::GENERIC);
-  __ Jump(stub.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
+  __ Jump(stub.GetCode(isolate()), RelocInfo::CODE_TARGET);
 
   __ Bind(&maybe_undefined1);
   if (Token::IsOrderedRelationalCompareOp(op_)) {
@@ -3807,7 +3798,7 @@ void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
   Register stub_entry = x11;
   {
     ExternalReference miss =
-      ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate());
+      ExternalReference(IC_Utility(IC::kCompareIC_Miss), isolate());
 
     FrameScope scope(masm, StackFrame::INTERNAL);
     Register op = x10;
@@ -4121,7 +4112,7 @@ void SubStringStub::Generate(MacroAssembler* masm) {
   __ CopyBytes(result_char0, substring_char0, result_length, x3, kCopyLong);
 
   __ Bind(&return_x0);
-  Counters* counters = masm->isolate()->counters();
+  Counters* counters = isolate()->counters();
   __ IncrementCounter(counters->sub_string_native(), 1, x3, x4);
   __ Drop(3);
   __ Ret();
@@ -4268,7 +4259,7 @@ void StringCompareStub::GenerateAsciiCharsCompareLoop(
 void StringCompareStub::Generate(MacroAssembler* masm) {
   Label runtime;
 
-  Counters* counters = masm->isolate()->counters();
+  Counters* counters = isolate()->counters();
 
   // Stack frame on entry.
   //  sp[0]: right string
@@ -4314,12 +4305,11 @@ void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
   //  -- x0    : right
   //  -- lr    : return address
   // -----------------------------------
-  Isolate* isolate = masm->isolate();
 
   // Load x2 with the allocation site.  We stick an undefined dummy value here
   // and replace it with the real allocation site later when we instantiate this
   // stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
-  __ LoadObject(x2, handle(isolate->heap()->undefined_value()));
+  __ LoadObject(x2, handle(isolate()->heap()->undefined_value()));
 
   // Make sure that we actually patched the allocation site.
   if (FLAG_debug_code) {
@@ -4331,7 +4321,7 @@ void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
 
   // Tail call into the stub that handles binary operations with allocation
   // sites.
-  BinaryOpWithAllocationSiteStub stub(state_);
+  BinaryOpWithAllocationSiteStub stub(isolate(), state_);
   __ TailCallStub(&stub);
 }
 
@@ -4392,12 +4382,12 @@ void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
   __ Mov(address, regs_.address());
   __ Mov(x0, regs_.object());
   __ Mov(x1, address);
-  __ Mov(x2, ExternalReference::isolate_address(masm->isolate()));
+  __ Mov(x2, ExternalReference::isolate_address(isolate()));
 
   AllowExternalCallThatCantCauseGC scope(masm);
   ExternalReference function =
       ExternalReference::incremental_marking_record_write_function(
-          masm->isolate());
+          isolate());
   __ CallCFunction(function, 3, 0);
 
   regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_);
@@ -4590,8 +4580,8 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
 
 
 void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
-  CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
-  __ Call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
+  CEntryStub ces(isolate(), 1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
+  __ Call(ces.GetCode(isolate()), RelocInfo::CODE_TARGET);
   int parameter_count_offset =
       StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
   __ Ldr(x1, MemOperand(fp, parameter_count_offset));
@@ -4613,7 +4603,7 @@ static const unsigned int kProfileEntryHookCallSize =
 
 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
   if (masm->isolate()->function_entry_hook() != NULL) {
-    ProfileEntryHookStub stub;
+    ProfileEntryHookStub stub(masm->isolate());
     Assembler::BlockConstPoolScope no_const_pools(masm);
     Label entry_hook_call_start;
     __ Bind(&entry_hook_call_start);
@@ -4642,7 +4632,7 @@ void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
 
 #if V8_HOST_ARCH_ARM64
   uintptr_t entry_hook =
-      reinterpret_cast<uintptr_t>(masm->isolate()->function_entry_hook());
+      reinterpret_cast<uintptr_t>(isolate()->function_entry_hook());
   __ Mov(x10, entry_hook);
 #else
   // Under the simulator we need to indirect the entry hook through a trampoline
@@ -4650,9 +4640,9 @@ void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
   ApiFunction dispatcher(FUNCTION_ADDR(EntryHookTrampoline));
   __ Mov(x10, Operand(ExternalReference(&dispatcher,
                                         ExternalReference::BUILTIN_CALL,
-                                        masm->isolate())));
+                                        isolate())));
   // It additionally takes an isolate as a third parameter
-  __ Mov(x2, ExternalReference::isolate_address(masm->isolate()));
+  __ Mov(x2, ExternalReference::isolate_address(isolate()));
 #endif
 
   // The caller's return address is above the saved temporaries.
@@ -4697,7 +4687,7 @@ void DirectCEntryStub::GenerateCall(MacroAssembler* masm,
   ASSERT(csp.Is(__ StackPointer()));
 
   intptr_t code =
-      reinterpret_cast<intptr_t>(GetCode(masm->isolate()).location());
+      reinterpret_cast<intptr_t>(GetCode(isolate()).location());
   __ Mov(lr, Operand(code, RelocInfo::CODE_TARGET));
   __ Mov(x10, target);
   // Branch to the stub.
@@ -4776,7 +4766,7 @@ void NameDictionaryLookupStub::GeneratePositiveLookup(
   }
 
   Label not_found;
-  NameDictionaryLookupStub stub(POSITIVE_LOOKUP);
+  NameDictionaryLookupStub stub(masm->isolate(), POSITIVE_LOOKUP);
   __ CallStub(&stub);
   __ Cbz(x0, &not_found);
   __ Mov(scratch2, x2);  // Move entry index into scratch2.
@@ -4847,7 +4837,7 @@ void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
 
   __ Ldr(x0, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
   __ Mov(x1, Operand(name));
-  NameDictionaryLookupStub stub(NEGATIVE_LOOKUP);
+  NameDictionaryLookupStub stub(masm->isolate(), NEGATIVE_LOOKUP);
   __ CallStub(&stub);
   // Move stub return value to scratch0. Note that scratch0 is not included in
   // spill_list and won't be clobbered by PopCPURegList.
@@ -4950,7 +4940,7 @@ static void CreateArrayDispatch(MacroAssembler* masm,
                                 AllocationSiteOverrideMode mode) {
   ASM_LOCATION("CreateArrayDispatch");
   if (mode == DISABLE_ALLOCATION_SITES) {
-    T stub(GetInitialFastElementsKind(), mode);
+    T stub(masm->isolate(), GetInitialFastElementsKind(), mode);
      __ TailCallStub(&stub);
 
   } else if (mode == DONT_OVERRIDE) {
@@ -4963,7 +4953,7 @@ static void CreateArrayDispatch(MacroAssembler* masm,
       // TODO(jbramley): Is this the best way to handle this? Can we make the
       // tail calls conditional, rather than hopping over each one?
       __ CompareAndBranch(kind, candidate_kind, ne, &next);
-      T stub(candidate_kind);
+      T stub(masm->isolate(), candidate_kind);
       __ TailCallStub(&stub);
       __ Bind(&next);
     }
@@ -5013,12 +5003,14 @@ static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
     ElementsKind initial = GetInitialFastElementsKind();
     ElementsKind holey_initial = GetHoleyElementsKind(initial);
 
-    ArraySingleArgumentConstructorStub stub_holey(holey_initial,
+    ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
+                                                  holey_initial,
                                                   DISABLE_ALLOCATION_SITES);
     __ TailCallStub(&stub_holey);
 
     __ Bind(&normal_sequence);
-    ArraySingleArgumentConstructorStub stub(initial,
+    ArraySingleArgumentConstructorStub stub(masm->isolate(),
+                                            initial,
                                             DISABLE_ALLOCATION_SITES);
     __ TailCallStub(&stub);
   } else if (mode == DONT_OVERRIDE) {
@@ -5050,7 +5042,7 @@ static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
       Label next;
       ElementsKind candidate_kind = GetFastElementsKindFromSequenceIndex(i);
       __ CompareAndBranch(kind, candidate_kind, ne, &next);
-      ArraySingleArgumentConstructorStub stub(candidate_kind);
+      ArraySingleArgumentConstructorStub stub(masm->isolate(), candidate_kind);
       __ TailCallStub(&stub);
       __ Bind(&next);
     }
@@ -5069,10 +5061,10 @@ static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
       TERMINAL_FAST_ELEMENTS_KIND);
   for (int i = 0; i <= to_index; ++i) {
     ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
-    T stub(kind);
+    T stub(isolate, kind);
     stub.GetCode(isolate);
     if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) {
-      T stub1(kind, DISABLE_ALLOCATION_SITES);
+      T stub1(isolate, kind, DISABLE_ALLOCATION_SITES);
       stub1.GetCode(isolate);
     }
   }
@@ -5094,11 +5086,11 @@ void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime(
   ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
   for (int i = 0; i < 2; i++) {
     // For internal arrays we only need a few things
-    InternalArrayNoArgumentConstructorStub stubh1(kinds[i]);
+    InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]);
     stubh1.GetCode(isolate);
-    InternalArraySingleArgumentConstructorStub stubh2(kinds[i]);
+    InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
     stubh2.GetCode(isolate);
-    InternalArrayNArgumentsConstructorStub stubh3(kinds[i]);
+    InternalArrayNArgumentsConstructorStub stubh3(isolate, kinds[i]);
     stubh3.GetCode(isolate);
   }
 }
@@ -5202,22 +5194,22 @@ void InternalArrayConstructorStub::GenerateCase(
     __ Cbz(x10, &packed_case);
 
     InternalArraySingleArgumentConstructorStub
-        stub1_holey(GetHoleyElementsKind(kind));
+        stub1_holey(isolate(), GetHoleyElementsKind(kind));
     __ TailCallStub(&stub1_holey);
 
     __ Bind(&packed_case);
   }
-  InternalArraySingleArgumentConstructorStub stub1(kind);
+  InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
   __ TailCallStub(&stub1);
 
   __ Bind(&zero_case);
   // No arguments.
-  InternalArrayNoArgumentConstructorStub stub0(kind);
+  InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
   __ TailCallStub(&stub0);
 
   __ Bind(&n_case);
   // N arguments.
-  InternalArrayNArgumentsConstructorStub stubN(kind);
+  InternalArrayNArgumentsConstructorStub stubN(isolate(), kind);
   __ TailCallStub(&stubN);
 }
 
@@ -5229,8 +5221,6 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
   //  -- sp[0] : return address
   //  -- sp[4] : last argument
   // -----------------------------------
-  Handle<Object> undefined_sentinel(
-      masm->isolate()->heap()->undefined_value(), masm->isolate());
 
   Register constructor = x1;
 
@@ -5309,8 +5299,6 @@ void CallApiFunctionStub::Generate(MacroAssembler* masm) {
   STATIC_ASSERT(FCA::kHolderIndex == 0);
   STATIC_ASSERT(FCA::kArgsLength == 7);
 
-  Isolate* isolate = masm->isolate();
-
   // FunctionCallbackArguments: context, callee and call data.
   __ Push(context, callee, call_data);
 
@@ -5321,7 +5309,7 @@ void CallApiFunctionStub::Generate(MacroAssembler* masm) {
     __ LoadRoot(call_data, Heap::kUndefinedValueRootIndex);
   }
   Register isolate_reg = x5;
-  __ Mov(isolate_reg, ExternalReference::isolate_address(isolate));
+  __ Mov(isolate_reg, ExternalReference::isolate_address(isolate()));
 
   // FunctionCallbackArguments:
   //    return value, return value default, isolate, holder.
@@ -5359,7 +5347,7 @@ void CallApiFunctionStub::Generate(MacroAssembler* masm) {
   ExternalReference::Type thunk_type = ExternalReference::PROFILING_API_CALL;
   ApiFunction thunk_fun(thunk_address);
   ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type,
-      masm->isolate());
+      isolate());
 
   AllowExternalCallThatCantCauseGC scope(masm);
   MemOperand context_restore_operand(
@@ -5417,7 +5405,7 @@ void CallApiGetterStub::Generate(MacroAssembler* masm) {
       ExternalReference::PROFILING_GETTER_CALL;
   ApiFunction thunk_fun(thunk_address);
   ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type,
-      masm->isolate());
+      isolate());
 
   const int spill_offset = 1 + kApiStackSpace;
   __ CallApiFunctionAndReturn(api_function_address,
index f290a088ce03d07f29f087973d54b9f7047cf4d1..73624779aeddb81b9d4d0e61f522c4b6f39a0d90 100644 (file)
@@ -39,8 +39,8 @@ void ArrayNativeCode(MacroAssembler* masm, Label* call_generic_code);
 
 class StoreBufferOverflowStub: public PlatformCodeStub {
  public:
-  explicit StoreBufferOverflowStub(SaveFPRegsMode save_fp)
-      : save_doubles_(save_fp) { }
+  StoreBufferOverflowStub(Isolate* isolate, SaveFPRegsMode save_fp)
+      : PlatformCodeStub(isolate), save_doubles_(save_fp) { }
 
   void Generate(MacroAssembler* masm);
 
@@ -79,8 +79,8 @@ class StringHelper : public AllStatic {
 
 class StoreRegistersStateStub: public PlatformCodeStub {
  public:
-  explicit StoreRegistersStateStub(SaveFPRegsMode with_fp)
-      : save_doubles_(with_fp) {}
+  StoreRegistersStateStub(Isolate* isolate, SaveFPRegsMode with_fp)
+      : PlatformCodeStub(isolate), save_doubles_(with_fp) {}
 
   static Register to_be_pushed_lr() { return ip0; }
   static void GenerateAheadOfTime(Isolate* isolate);
@@ -95,8 +95,8 @@ class StoreRegistersStateStub: public PlatformCodeStub {
 
 class RestoreRegistersStateStub: public PlatformCodeStub {
  public:
-  explicit RestoreRegistersStateStub(SaveFPRegsMode with_fp)
-      : save_doubles_(with_fp) {}
+  RestoreRegistersStateStub(Isolate* isolate, SaveFPRegsMode with_fp)
+      : PlatformCodeStub(isolate), save_doubles_(with_fp) {}
 
   static void GenerateAheadOfTime(Isolate* isolate);
  private:
@@ -113,12 +113,14 @@ class RecordWriteStub: public PlatformCodeStub {
   // Stub to record the write of 'value' at 'address' in 'object'.
   // Typically 'address' = 'object' + <some offset>.
   // See MacroAssembler::RecordWriteField() for example.
-  RecordWriteStub(Register object,
+  RecordWriteStub(Isolate* isolate,
+                  Register object,
                   Register value,
                   Register address,
                   RememberedSetAction remembered_set_action,
                   SaveFPRegsMode fp_mode)
-      : object_(object),
+      : PlatformCodeStub(isolate),
+        object_(object),
         value_(value),
         address_(address),
         remembered_set_action_(remembered_set_action),
@@ -370,7 +372,7 @@ class RecordWriteStub: public PlatformCodeStub {
 // the exit frame before doing the call with GenerateCall.
 class DirectCEntryStub: public PlatformCodeStub {
  public:
-  DirectCEntryStub() {}
+  explicit DirectCEntryStub(Isolate* isolate) : PlatformCodeStub(isolate) {}
   void Generate(MacroAssembler* masm);
   void GenerateCall(MacroAssembler* masm, Register target);
 
@@ -386,7 +388,8 @@ class NameDictionaryLookupStub: public PlatformCodeStub {
  public:
   enum LookupMode { POSITIVE_LOOKUP, NEGATIVE_LOOKUP };
 
-  explicit NameDictionaryLookupStub(LookupMode mode) : mode_(mode) { }
+  NameDictionaryLookupStub(Isolate* isolate, LookupMode mode)
+      : PlatformCodeStub(isolate), mode_(mode) { }
 
   void Generate(MacroAssembler* masm);
 
@@ -434,7 +437,7 @@ class NameDictionaryLookupStub: public PlatformCodeStub {
 
 class SubStringStub: public PlatformCodeStub {
  public:
-  SubStringStub() {}
+  explicit SubStringStub(Isolate* isolate) : PlatformCodeStub(isolate) {}
 
  private:
   Major MajorKey() { return SubString; }
@@ -446,7 +449,7 @@ class SubStringStub: public PlatformCodeStub {
 
 class StringCompareStub: public PlatformCodeStub {
  public:
-  StringCompareStub() { }
+  explicit StringCompareStub(Isolate* isolate) : PlatformCodeStub(isolate) { }
 
   // Compares two flat ASCII strings and returns result in x0.
   static void GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
index 716337f0519a4cea7bf11c6d2ecd5f695b83ed01..f1ed732612c11e33a4b62f88e20fe3e63729d480 100644 (file)
@@ -204,7 +204,7 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm,
     __ Mov(x0, 0);  // No arguments.
     __ Mov(x1, ExternalReference::debug_break(masm->isolate()));
 
-    CEntryStub stub(1);
+    CEntryStub stub(masm->isolate(), 1);
     __ CallStub(&stub);
 
     // Restore the register values from the expression stack.
index 76bb1b54e7a859137f2489d0838e43f737d851ab..1316a8193e21b4106d26ab267067faa7c89660e3 100644 (file)
@@ -241,7 +241,7 @@ void FullCodeGenerator::Generate() {
       __ Push(x1, x10);
       __ CallRuntime(Runtime::kHiddenNewGlobalContext, 2);
     } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
-      FastNewContextStub stub(heap_slots);
+      FastNewContextStub stub(isolate(), heap_slots);
       __ CallStub(&stub);
     } else {
       __ Push(x1);
@@ -301,7 +301,7 @@ void FullCodeGenerator::Generate() {
     } else {
       type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
     }
-    ArgumentsAccessStub stub(type);
+    ArgumentsAccessStub stub(isolate(), type);
     __ CallStub(&stub);
 
     SetVar(arguments, x0, x1, x2);
@@ -1363,7 +1363,9 @@ void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
       !pretenure &&
       scope()->is_function_scope() &&
       info->num_literals() == 0) {
-    FastNewClosureStub stub(info->strict_mode(), info->is_generator());
+    FastNewClosureStub stub(isolate(),
+                            info->strict_mode(),
+                            info->is_generator());
     __ Mov(x2, Operand(info));
     __ CallStub(&stub);
   } else {
@@ -1682,7 +1684,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
     __ Push(x3, x2, x1, x0);
     __ CallRuntime(Runtime::kHiddenCreateObjectLiteral, 4);
   } else {
-    FastCloneShallowObjectStub stub(properties_count);
+    FastCloneShallowObjectStub stub(isolate(), properties_count);
     __ CallStub(&stub);
   }
 
@@ -1820,6 +1822,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
   if (has_fast_elements && constant_elements_values->map() ==
       isolate()->heap()->fixed_cow_array_map()) {
     FastCloneShallowArrayStub stub(
+        isolate(),
         FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
         allocation_site_mode,
         length);
@@ -1841,7 +1844,10 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
       mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
     }
 
-    FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
+    FastCloneShallowArrayStub stub(isolate(),
+                                   mode,
+                                   allocation_site_mode,
+                                   length);
     __ CallStub(&stub);
   }
 
@@ -1873,7 +1879,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
                           EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
     } else {
       __ Mov(x3, Smi::FromInt(i));
-      StoreArrayLiteralElementStub stub;
+      StoreArrayLiteralElementStub stub(isolate());
       __ CallStub(&stub);
     }
 
@@ -2034,7 +2040,7 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
   patch_site.EmitJumpIfSmi(x10, &both_smis);
 
   __ Bind(&stub_call);
-  BinaryOpICStub stub(op, mode);
+  BinaryOpICStub stub(isolate(), op, mode);
   {
     Assembler::BlockPoolsScope scope(masm_);
     CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
@@ -2119,7 +2125,7 @@ void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
                                      Token::Value op,
                                      OverwriteMode mode) {
   __ Pop(x1);
-  BinaryOpICStub stub(op, mode);
+  BinaryOpICStub stub(isolate(), op, mode);
   JumpPatchSite patch_site(masm_);    // Unbound, signals no inlined smi code.
   {
     Assembler::BlockPoolsScope scope(masm_);
@@ -2376,7 +2382,7 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr) {
 
   // Record source position for debugger.
   SetSourcePosition(expr->position());
-  CallFunctionStub stub(arg_count, flags);
+  CallFunctionStub stub(isolate(), arg_count, flags);
   __ Peek(x1, (arg_count + 1) * kPointerSize);
   __ CallStub(&stub);
 
@@ -2417,7 +2423,7 @@ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
 
   // Record source position for debugger.
   SetSourcePosition(expr->position());
-  CallFunctionStub stub(arg_count, CALL_AS_METHOD);
+  CallFunctionStub stub(isolate(), arg_count, CALL_AS_METHOD);
   __ Peek(x1, (arg_count + 1) * kPointerSize);
   __ CallStub(&stub);
 
@@ -2448,7 +2454,7 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr) {
   __ Mov(x3, Smi::FromInt(expr->CallFeedbackSlot()));
 
   // Record call targets in unoptimized code.
-  CallFunctionStub stub(arg_count, RECORD_CALL_TARGET);
+  CallFunctionStub stub(isolate(), arg_count, RECORD_CALL_TARGET);
   __ Peek(x1, (arg_count + 1) * kXRegSize);
   __ CallStub(&stub);
   RecordJSReturnSite(expr);
@@ -2533,7 +2539,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
     SetSourcePosition(expr->position());
 
     // Call the evaluated function.
-    CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS);
+    CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
     __ Peek(x1, (arg_count + 1) * kXRegSize);
     __ CallStub(&stub);
     RecordJSReturnSite(expr);
@@ -2653,7 +2659,7 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
   __ LoadObject(x2, FeedbackVector());
   __ Mov(x3, Smi::FromInt(expr->CallNewFeedbackSlot()));
 
-  CallConstructStub stub(RECORD_CALL_TARGET);
+  CallConstructStub stub(isolate(), RECORD_CALL_TARGET);
   __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL);
   PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
   context()->Plug(x0);
@@ -3037,7 +3043,7 @@ void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
   VisitForAccumulatorValue(args->at(0));
   __ Mov(x1, x0);
   __ Mov(x0, Smi::FromInt(info_->scope()->num_parameters()));
-  ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
+  ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
   __ CallStub(&stub);
   context()->Plug(x0);
 }
@@ -3152,7 +3158,7 @@ void FullCodeGenerator::EmitLog(CallRuntime* expr) {
 
 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
   // Load the arguments on the stack and call the stub.
-  SubStringStub stub;
+  SubStringStub stub(isolate());
   ZoneList<Expression*>* args = expr->arguments();
   ASSERT(args->length() == 3);
   VisitForStackValue(args->at(0));
@@ -3165,7 +3171,7 @@ void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
 
 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
   // Load the arguments on the stack and call the stub.
-  RegExpExecStub stub;
+  RegExpExecStub stub(isolate());
   ZoneList<Expression*>* args = expr->arguments();
   ASSERT(args->length() == 4);
   VisitForStackValue(args->at(0));
@@ -3307,7 +3313,7 @@ void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
   ASSERT(args->length() == 2);
   VisitForStackValue(args->at(0));
   VisitForStackValue(args->at(1));
-  MathPowStub stub(MathPowStub::ON_STACK);
+  MathPowStub stub(isolate(), MathPowStub::ON_STACK);
   __ CallStub(&stub);
   context()->Plug(x0);
 }
@@ -3349,7 +3355,7 @@ void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
   // Load the argument into x0 and call the stub.
   VisitForAccumulatorValue(args->at(0));
 
-  NumberToStringStub stub;
+  NumberToStringStub stub(isolate());
   __ CallStub(&stub);
   context()->Plug(x0);
 }
@@ -3477,7 +3483,7 @@ void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
   VisitForAccumulatorValue(args->at(1));
 
   __ Pop(x1);
-  StringAddStub stub(STRING_ADD_CHECK_BOTH, NOT_TENURED);
+  StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
   __ CallStub(&stub);
 
   context()->Plug(x0);
@@ -3490,7 +3496,7 @@ void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
   VisitForStackValue(args->at(0));
   VisitForStackValue(args->at(1));
 
-  StringCompareStub stub;
+  StringCompareStub stub(isolate());
   __ CallStub(&stub);
   context()->Plug(x0);
 }
@@ -3529,7 +3535,7 @@ void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
 
 
 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
-  RegExpConstructResultStub stub;
+  RegExpConstructResultStub stub(isolate());
   ZoneList<Expression*>* args = expr->arguments();
   ASSERT(args->length() == 3);
   VisitForStackValue(args->at(0));
@@ -3873,7 +3879,7 @@ void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
 
     // Record source position of the IC call.
     SetSourcePosition(expr->position());
-    CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS);
+    CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
     __ Peek(x1, (arg_count + 1) * kPointerSize);
     __ CallStub(&stub);
 
@@ -4091,7 +4097,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
     __ B(&stub_call);
     __ Bind(&slow);
   }
-  ToNumberStub convert_stub;
+  ToNumberStub convert_stub(isolate());
   __ CallStub(&convert_stub);
 
   // Save result for postfix expressions.
@@ -4123,7 +4129,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
 
   {
     Assembler::BlockPoolsScope scope(masm_);
-    BinaryOpICStub stub(Token::ADD, NO_OVERWRITE);
+    BinaryOpICStub stub(isolate(), Token::ADD, NO_OVERWRITE);
     CallIC(stub.GetCode(isolate()), expr->CountBinOpFeedbackId());
     patch_site.EmitPatchInfo();
   }
@@ -4345,7 +4351,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
 
     case Token::INSTANCEOF: {
       VisitForStackValue(expr->right());
-      InstanceofStub stub(InstanceofStub::kNoFlags);
+      InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
       __ CallStub(&stub);
       PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
       // The stub returns 0 for true.
@@ -4553,7 +4559,7 @@ void FullCodeGenerator::VisitYield(Yield* expr) {
       CallIC(ic, TypeFeedbackId::None());
       __ Mov(x1, x0);
       __ Poke(x1, 2 * kPointerSize);
-      CallFunctionStub stub(1, CALL_AS_METHOD);
+      CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
       __ CallStub(&stub);
 
       __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
index 969af2b06da77b0645b32e5f3310a78d249b3d23..46e038537869493fcbd70cc6fd104b445273819d 100644 (file)
@@ -430,7 +430,7 @@ void LCodeGen::DoCallFunction(LCallFunction* instr) {
   ASSERT(ToRegister(instr->result()).Is(x0));
 
   int arity = instr->arity();
-  CallFunctionStub stub(arity, instr->hydrogen()->function_flags());
+  CallFunctionStub stub(isolate(), arity, instr->hydrogen()->function_flags());
   CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
 }
 
@@ -444,7 +444,7 @@ void LCodeGen::DoCallNew(LCallNew* instr) {
   // No cell in x2 for construct type feedback in optimized code.
   __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
 
-  CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
+  CallConstructStub stub(isolate(), NO_CALL_FUNCTION_FLAGS);
   CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
 
   ASSERT(ToRegister(instr->result()).is(x0));
@@ -466,7 +466,7 @@ void LCodeGen::DoCallNewArray(LCallNewArray* instr) {
           : DONT_OVERRIDE;
 
   if (instr->arity() == 0) {
-    ArrayNoArgumentConstructorStub stub(kind, override_mode);
+    ArrayNoArgumentConstructorStub stub(isolate(), kind, override_mode);
     CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
   } else if (instr->arity() == 1) {
     Label done;
@@ -478,17 +478,19 @@ void LCodeGen::DoCallNewArray(LCallNewArray* instr) {
       __ Cbz(x10, &packed_case);
 
       ElementsKind holey_kind = GetHoleyElementsKind(kind);
-      ArraySingleArgumentConstructorStub stub(holey_kind, override_mode);
+      ArraySingleArgumentConstructorStub stub(isolate(),
+                                              holey_kind,
+                                              override_mode);
       CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
       __ B(&done);
       __ Bind(&packed_case);
     }
 
-    ArraySingleArgumentConstructorStub stub(kind, override_mode);
+    ArraySingleArgumentConstructorStub stub(isolate(), kind, override_mode);
     CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
     __ Bind(&done);
   } else {
-    ArrayNArgumentsConstructorStub stub(kind, override_mode);
+    ArrayNArgumentsConstructorStub stub(isolate(), kind, override_mode);
     CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
   }
 
@@ -710,7 +712,7 @@ bool LCodeGen::GeneratePrologue() {
     Comment(";;; Allocate local context");
     // Argument to NewContext is the function, which is in x1.
     if (heap_slots <= FastNewContextStub::kMaximumSlots) {
-      FastNewContextStub stub(heap_slots);
+      FastNewContextStub stub(isolate(), heap_slots);
       __ CallStub(&stub);
     } else {
       __ Push(x1);
@@ -1738,7 +1740,7 @@ void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
   ASSERT(ToRegister(instr->right()).is(x0));
   ASSERT(ToRegister(instr->result()).is(x0));
 
-  BinaryOpICStub stub(instr->op(), NO_OVERWRITE);
+  BinaryOpICStub stub(isolate(), instr->op(), NO_OVERWRITE);
   CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
 }
 
@@ -2049,17 +2051,17 @@ void LCodeGen::DoCallStub(LCallStub* instr) {
   ASSERT(ToRegister(instr->result()).is(x0));
   switch (instr->hydrogen()->major_key()) {
     case CodeStub::RegExpExec: {
-      RegExpExecStub stub;
+      RegExpExecStub stub(isolate());
       CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
       break;
     }
     case CodeStub::SubString: {
-      SubStringStub stub;
+      SubStringStub stub(isolate());
       CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
       break;
     }
     case CodeStub::StringCompare: {
-      StringCompareStub stub;
+      StringCompareStub stub(isolate());
       CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
       break;
     }
@@ -2788,7 +2790,8 @@ void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
   // space for nested functions that don't need literals cloning.
   bool pretenure = instr->hydrogen()->pretenure();
   if (!pretenure && instr->hydrogen()->has_no_literals()) {
-    FastNewClosureStub stub(instr->hydrogen()->strict_mode(),
+    FastNewClosureStub stub(isolate(),
+                            instr->hydrogen()->strict_mode(),
                             instr->hydrogen()->is_generator());
     __ Mov(x2, Operand(instr->hydrogen()->shared_info()));
     CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
@@ -2963,7 +2966,7 @@ void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
   ASSERT(ToRegister(instr->left()).Is(InstanceofStub::left()));
   ASSERT(ToRegister(instr->right()).Is(InstanceofStub::right()));
 
-  InstanceofStub stub(InstanceofStub::kArgsInRegisters);
+  InstanceofStub stub(isolate(), InstanceofStub::kArgsInRegisters);
   CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
 
   // InstanceofStub returns a result in x0:
@@ -3078,7 +3081,7 @@ void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
   ASSERT(ToRegister(instr->value()).Is(InstanceofStub::left()));
   __ LoadObject(InstanceofStub::right(), instr->function());
 
-  InstanceofStub stub(flags);
+  InstanceofStub stub(isolate(), flags);
   CallCodeGeneric(stub.GetCode(isolate()),
                   RelocInfo::CODE_TARGET,
                   instr,
@@ -3990,7 +3993,7 @@ void LCodeGen::DoPower(LPower* instr) {
   ASSERT(ToDoubleRegister(instr->result()).is(d0));
 
   if (exponent_type.IsSmi()) {
-    MathPowStub stub(MathPowStub::TAGGED);
+    MathPowStub stub(isolate(), MathPowStub::TAGGED);
     __ CallStub(&stub);
   } else if (exponent_type.IsTagged()) {
     Label no_deopt;
@@ -3999,18 +4002,18 @@ void LCodeGen::DoPower(LPower* instr) {
     DeoptimizeIfNotRoot(x0, Heap::kHeapNumberMapRootIndex,
                         instr->environment());
     __ Bind(&no_deopt);
-    MathPowStub stub(MathPowStub::TAGGED);
+    MathPowStub stub(isolate(), MathPowStub::TAGGED);
     __ CallStub(&stub);
   } else if (exponent_type.IsInteger32()) {
     // Ensure integer exponent has no garbage in top 32-bits, as MathPowStub
     // supports large integer exponents.
     Register exponent = ToRegister(instr->right());
     __ Sxtw(exponent, exponent);
-    MathPowStub stub(MathPowStub::INTEGER);
+    MathPowStub stub(isolate(), MathPowStub::INTEGER);
     __ CallStub(&stub);
   } else {
     ASSERT(exponent_type.IsDouble());
-    MathPowStub stub(MathPowStub::DOUBLE);
+    MathPowStub stub(isolate(), MathPowStub::DOUBLE);
     __ CallStub(&stub);
   }
 }
@@ -5312,7 +5315,8 @@ void LCodeGen::DoStringAdd(LStringAdd* instr) {
   ASSERT(ToRegister(instr->context()).is(cp));
   ASSERT(ToRegister(instr->left()).Is(x1));
   ASSERT(ToRegister(instr->right()).Is(x0));
-  StringAddStub stub(instr->hydrogen()->flags(),
+  StringAddStub stub(isolate(),
+                     instr->hydrogen()->flags(),
                      instr->hydrogen()->pretenure_flag());
   CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
 }
@@ -5637,7 +5641,7 @@ void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
         this, Safepoint::kWithRegistersAndDoubles);
     __ Mov(x1, Operand(to_map));
     bool is_js_array = from_map->instance_type() == JS_ARRAY_TYPE;
-    TransitionElementsKindStub stub(from_kind, to_kind, is_js_array);
+    TransitionElementsKindStub stub(isolate(), from_kind, to_kind, is_js_array);
     __ CallStub(&stub);
     RecordSafepointWithRegistersAndDoubles(
         instr->pointer_map(), 0, Safepoint::kLazyDeopt);
index 4c9d35a4d380455035656a097c0bcf767445decb..79d6b503d14cd0227e2b26ab620ccd704928c3b2 100644 (file)
@@ -390,12 +390,12 @@ class LCodeGen: public LCodeGenBase {
       codegen_->masm_->Mov(to_be_pushed_lr, lr);
       switch (codegen_->expected_safepoint_kind_) {
         case Safepoint::kWithRegisters: {
-          StoreRegistersStateStub stub(kDontSaveFPRegs);
+          StoreRegistersStateStub stub(codegen_->isolate(), kDontSaveFPRegs);
           codegen_->masm_->CallStub(&stub);
           break;
         }
         case Safepoint::kWithRegistersAndDoubles: {
-          StoreRegistersStateStub stub(kSaveFPRegs);
+          StoreRegistersStateStub stub(codegen_->isolate(), kSaveFPRegs);
           codegen_->masm_->CallStub(&stub);
           break;
         }
@@ -409,12 +409,12 @@ class LCodeGen: public LCodeGenBase {
       ASSERT((kind & Safepoint::kWithRegisters) != 0);
       switch (kind) {
         case Safepoint::kWithRegisters: {
-          RestoreRegistersStateStub stub(kDontSaveFPRegs);
+          RestoreRegistersStateStub stub(codegen_->isolate(), kDontSaveFPRegs);
           codegen_->masm_->CallStub(&stub);
           break;
         }
         case Safepoint::kWithRegistersAndDoubles: {
-          RestoreRegistersStateStub stub(kSaveFPRegs);
+          RestoreRegistersStateStub stub(codegen_->isolate(), kSaveFPRegs);
           codegen_->masm_->CallStub(&stub);
           break;
         }
index 70d601b2e6e0399fe4ceabb0ae11ef1e6a094be3..d130a55e41f6c25d181073f88775ac9403a61bd7 100644 (file)
@@ -1651,7 +1651,7 @@ void MacroAssembler::CallRuntime(const Runtime::Function* f,
   Mov(x0, num_arguments);
   Mov(x1, ExternalReference(f, isolate()));
 
-  CEntryStub stub(1, save_doubles);
+  CEntryStub stub(isolate(), 1, save_doubles);
   CallStub(&stub);
 }
 
@@ -1729,7 +1729,7 @@ void MacroAssembler::CallApiFunctionAndReturn(
   // Native call returns to the DirectCEntry stub which redirects to the
   // return address pushed on stack (could have moved after GC).
   // DirectCEntry stub itself is generated early and never moves.
-  DirectCEntryStub stub;
+  DirectCEntryStub stub(isolate());
   stub.GenerateCall(this, x3);
 
   if (FLAG_log_timer_events) {
@@ -1813,14 +1813,14 @@ void MacroAssembler::CallExternalReference(const ExternalReference& ext,
   Mov(x0, num_arguments);
   Mov(x1, ext);
 
-  CEntryStub stub(1);
+  CEntryStub stub(isolate(), 1);
   CallStub(&stub);
 }
 
 
 void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin) {
   Mov(x1, builtin);
-  CEntryStub stub(1);
+  CEntryStub stub(isolate(), 1);
   Jump(stub.GetCode(isolate()), RelocInfo::CODE_TARGET);
 }
 
@@ -2919,7 +2919,8 @@ void MacroAssembler::TruncateDoubleToI(Register result,
   Push(lr);
   Push(double_input);  // Put input on stack.
 
-  DoubleToIStub stub(jssp,
+  DoubleToIStub stub(isolate(),
+                     jssp,
                      result,
                      0,
                      true,   // is_truncating
@@ -2947,7 +2948,8 @@ void MacroAssembler::TruncateHeapNumberToI(Register result,
 
   // If we fell through then inline version didn't succeed - call stub instead.
   Push(lr);
-  DoubleToIStub stub(object,
+  DoubleToIStub stub(isolate(),
+                     object,
                      result,
                      HeapNumber::kValueOffset - kHeapObjectTag,
                      true,   // is_truncating
@@ -3188,7 +3190,7 @@ void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
 void MacroAssembler::DebugBreak() {
   Mov(x0, 0);
   Mov(x1, ExternalReference(Runtime::kDebugBreak, isolate()));
-  CEntryStub ces(1);
+  CEntryStub ces(isolate(), 1);
   ASSERT(AllowThisStubCall(&ces));
   Call(ces.GetCode(isolate()), RelocInfo::DEBUG_BREAK);
 }
@@ -4203,7 +4205,7 @@ void MacroAssembler::RememberedSetHelper(Register object,  // For debug tests.
   Bind(&store_buffer_overflow);
   Push(lr);
   StoreBufferOverflowStub store_buffer_overflow_stub =
-      StoreBufferOverflowStub(fp_mode);
+      StoreBufferOverflowStub(isolate(), fp_mode);
   CallStub(&store_buffer_overflow_stub);
   Pop(lr);
 
@@ -4396,7 +4398,8 @@ void MacroAssembler::RecordWrite(Register object,
   if (lr_status == kLRHasNotBeenSaved) {
     Push(lr);
   }
-  RecordWriteStub stub(object, value, address, remembered_set_action, fp_mode);
+  RecordWriteStub stub(isolate(), object, value, address, remembered_set_action,
+                       fp_mode);
   CallStub(&stub);
   if (lr_status == kLRHasNotBeenSaved) {
     Pop(lr);
index 81f46171dfbfde8b066e822c198bd55d3e058552..ebb9839235dff6786c6ccb66a0b89ffb4c73ec82 100644 (file)
@@ -1454,7 +1454,7 @@ void RegExpMacroAssemblerARM64::CallCheckStackGuardState(Register scratch) {
   ExternalReference check_stack_guard_state =
       ExternalReference::re_check_stack_guard_state(isolate());
   __ Mov(scratch, check_stack_guard_state);
-  DirectCEntryStub stub;
+  DirectCEntryStub stub(isolate());
   stub.GenerateCall(masm_, scratch);
 
   // The input string may have been moved in memory, we need to reload it.
index b46d813828e1798d43240d295af914f5f1bea853..9a6ef892c7d221cc961050d193194d505b90b095 100644 (file)
@@ -785,7 +785,7 @@ void StubCompiler::GenerateFastApiCall(MacroAssembler* masm,
   __ Mov(api_function_address, ref);
 
   // Jump to stub.
-  CallApiFunctionStub stub(is_store, call_data_undefined, argc);
+  CallApiFunctionStub stub(isolate, is_store, call_data_undefined, argc);
   __ TailCallStub(&stub);
 }
 
@@ -994,12 +994,14 @@ void LoadStubCompiler::GenerateLoadField(Register reg,
                                          Representation representation) {
   __ Mov(receiver(), reg);
   if (kind() == Code::LOAD_IC) {
-    LoadFieldStub stub(field.is_inobject(holder),
+    LoadFieldStub stub(isolate(),
+                       field.is_inobject(holder),
                        field.translate(holder),
                        representation);
     GenerateTailCall(masm(), stub.GetCode(isolate()));
   } else {
-    KeyedLoadFieldStub stub(field.is_inobject(holder),
+    KeyedLoadFieldStub stub(isolate(),
+                            field.is_inobject(holder),
                             field.translate(holder),
                             representation);
     GenerateTailCall(masm(), stub.GetCode(isolate()));
@@ -1064,7 +1066,7 @@ void LoadStubCompiler::GenerateLoadCallback(
   ExternalReference ref = ExternalReference(&fun, type, isolate());
   __ Mov(getter_address_reg, ref);
 
-  CallApiGetterStub stub;
+  CallApiGetterStub stub(isolate());
   __ TailCallStub(&stub);
 }
 
index 0840f6c21f4dd966a66ed94f65075be37a8814eb..0c9f3af41f325114e9bd23d22b18b17e61a4a7ee 100644 (file)
@@ -219,7 +219,8 @@ void BinaryOpICStub::GenerateAheadOfTime(Isolate* isolate) {
   // Generate the uninitialized versions of the stub.
   for (int op = Token::BIT_OR; op <= Token::MOD; ++op) {
     for (int mode = NO_OVERWRITE; mode <= OVERWRITE_RIGHT; ++mode) {
-      BinaryOpICStub stub(static_cast<Token::Value>(op),
+      BinaryOpICStub stub(isolate,
+                          static_cast<Token::Value>(op),
                           static_cast<OverwriteMode>(mode));
       stub.GetCode(isolate);
     }
@@ -238,7 +239,7 @@ void BinaryOpICStub::PrintState(StringStream* stream) {
 // static
 void BinaryOpICStub::GenerateAheadOfTime(Isolate* isolate,
                                          const BinaryOpIC::State& state) {
-  BinaryOpICStub stub(state);
+  BinaryOpICStub stub(isolate, state);
   stub.GetCode(isolate);
 }
 
@@ -259,7 +260,7 @@ void BinaryOpICWithAllocationSiteStub::PrintState(StringStream* stream) {
 void BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(
     Isolate* isolate, const BinaryOpIC::State& state) {
   if (state.CouldCreateAllocationMementos()) {
-    BinaryOpICWithAllocationSiteStub stub(state);
+    BinaryOpICWithAllocationSiteStub stub(isolate, state);
     stub.GetCode(isolate);
   }
 }
@@ -540,7 +541,7 @@ void KeyedLoadDictionaryElementPlatformStub::Generate(
 
 
 void CreateAllocationSiteStub::GenerateAheadOfTime(Isolate* isolate) {
-  CreateAllocationSiteStub stub;
+  CreateAllocationSiteStub stub(isolate);
   stub.GetCode(isolate);
 }
 
@@ -696,8 +697,8 @@ bool ToBooleanStub::Types::CanBeUndetectable() const {
 
 
 void StubFailureTrampolineStub::GenerateAheadOfTime(Isolate* isolate) {
-  StubFailureTrampolineStub stub1(NOT_JS_FUNCTION_STUB_MODE);
-  StubFailureTrampolineStub stub2(JS_FUNCTION_STUB_MODE);
+  StubFailureTrampolineStub stub1(isolate, NOT_JS_FUNCTION_STUB_MODE);
+  StubFailureTrampolineStub stub2(isolate, JS_FUNCTION_STUB_MODE);
   stub1.GetCode(isolate);
   stub2.GetCode(isolate);
 }
@@ -723,36 +724,38 @@ static void InstallDescriptor(Isolate* isolate, HydrogenCodeStub* stub) {
 
 
 void ArrayConstructorStubBase::InstallDescriptors(Isolate* isolate) {
-  ArrayNoArgumentConstructorStub stub1(GetInitialFastElementsKind());
+  ArrayNoArgumentConstructorStub stub1(isolate, GetInitialFastElementsKind());
   InstallDescriptor(isolate, &stub1);
-  ArraySingleArgumentConstructorStub stub2(GetInitialFastElementsKind());
+  ArraySingleArgumentConstructorStub stub2(isolate,
+                                           GetInitialFastElementsKind());
   InstallDescriptor(isolate, &stub2);
-  ArrayNArgumentsConstructorStub stub3(GetInitialFastElementsKind());
+  ArrayNArgumentsConstructorStub stub3(isolate, GetInitialFastElementsKind());
   InstallDescriptor(isolate, &stub3);
 }
 
 
 void NumberToStringStub::InstallDescriptors(Isolate* isolate) {
-  NumberToStringStub stub;
+  NumberToStringStub stub(isolate);
   InstallDescriptor(isolate, &stub);
 }
 
 
 void FastNewClosureStub::InstallDescriptors(Isolate* isolate) {
-  FastNewClosureStub stub(STRICT, false);
+  FastNewClosureStub stub(isolate, STRICT, false);
   InstallDescriptor(isolate, &stub);
 }
 
 
 void FastNewContextStub::InstallDescriptors(Isolate* isolate) {
-  FastNewContextStub stub(FastNewContextStub::kMaximumSlots);
+  FastNewContextStub stub(isolate, FastNewContextStub::kMaximumSlots);
   InstallDescriptor(isolate, &stub);
 }
 
 
 // static
 void FastCloneShallowArrayStub::InstallDescriptors(Isolate* isolate) {
-  FastCloneShallowArrayStub stub(FastCloneShallowArrayStub::CLONE_ELEMENTS,
+  FastCloneShallowArrayStub stub(isolate,
+                                 FastCloneShallowArrayStub::CLONE_ELEMENTS,
                                  DONT_TRACK_ALLOCATION_SITE, 0);
   InstallDescriptor(isolate, &stub);
 }
@@ -760,40 +763,41 @@ void FastCloneShallowArrayStub::InstallDescriptors(Isolate* isolate) {
 
 // static
 void BinaryOpICStub::InstallDescriptors(Isolate* isolate) {
-  BinaryOpICStub stub(Token::ADD, NO_OVERWRITE);
+  BinaryOpICStub stub(isolate, Token::ADD, NO_OVERWRITE);
   InstallDescriptor(isolate, &stub);
 }
 
 
 // static
 void BinaryOpWithAllocationSiteStub::InstallDescriptors(Isolate* isolate) {
-  BinaryOpWithAllocationSiteStub stub(Token::ADD, NO_OVERWRITE);
+  BinaryOpWithAllocationSiteStub stub(isolate, Token::ADD, NO_OVERWRITE);
   InstallDescriptor(isolate, &stub);
 }
 
 
 // static
 void StringAddStub::InstallDescriptors(Isolate* isolate) {
-  StringAddStub stub(STRING_ADD_CHECK_NONE, NOT_TENURED);
+  StringAddStub stub(isolate, STRING_ADD_CHECK_NONE, NOT_TENURED);
   InstallDescriptor(isolate, &stub);
 }
 
 
 // static
 void RegExpConstructResultStub::InstallDescriptors(Isolate* isolate) {
-  RegExpConstructResultStub stub;
+  RegExpConstructResultStub stub(isolate);
   InstallDescriptor(isolate, &stub);
 }
 
 
 ArrayConstructorStub::ArrayConstructorStub(Isolate* isolate)
-    : argument_count_(ANY) {
+    : PlatformCodeStub(isolate), argument_count_(ANY) {
   ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
 }
 
 
 ArrayConstructorStub::ArrayConstructorStub(Isolate* isolate,
-                                           int argument_count) {
+                                           int argument_count)
+    : PlatformCodeStub(isolate) {
   if (argument_count == 0) {
     argument_count_ = NONE;
   } else if (argument_count == 1) {
@@ -808,16 +812,16 @@ ArrayConstructorStub::ArrayConstructorStub(Isolate* isolate,
 
 
 void InternalArrayConstructorStubBase::InstallDescriptors(Isolate* isolate) {
-  InternalArrayNoArgumentConstructorStub stub1(FAST_ELEMENTS);
+  InternalArrayNoArgumentConstructorStub stub1(isolate, FAST_ELEMENTS);
   InstallDescriptor(isolate, &stub1);
-  InternalArraySingleArgumentConstructorStub stub2(FAST_ELEMENTS);
+  InternalArraySingleArgumentConstructorStub stub2(isolate, FAST_ELEMENTS);
   InstallDescriptor(isolate, &stub2);
-  InternalArrayNArgumentsConstructorStub stub3(FAST_ELEMENTS);
+  InternalArrayNArgumentsConstructorStub stub3(isolate, FAST_ELEMENTS);
   InstallDescriptor(isolate, &stub3);
 }
 
 InternalArrayConstructorStub::InternalArrayConstructorStub(
-    Isolate* isolate) {
+    Isolate* isolate) : PlatformCodeStub(isolate) {
   InternalArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
 }
 
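All of the call-site hunks in this CL follow the same mechanical pattern, so a
compact before/after sketch may help while reading the remaining files. The
wrapper function below is illustrative only and does not exist in the tree;
only the constructor shapes are taken from this change.

// Before: stubs were constructed without an isolate.
//   CEntryStub stub(1, kSaveFPRegs);
//   masm->CallStub(&stub);
//
// After: the isolate is the first constructor argument, usually taken from
// the surrounding MacroAssembler, LCodeGen, or compiler object.
void CallCEntryExample(MacroAssembler* masm) {  // hypothetical helper
  CEntryStub stub(masm->isolate(), 1, kSaveFPRegs);
  masm->CallStub(&stub);
}
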
index fd0c0390b396e309b5e690035a1b67891645c068..c28812c63e3978e08e0231369cc402912e1a6454 100644 (file)
@@ -174,6 +174,7 @@ class CodeStub BASE_EMBEDDED {
 
   static const char* MajorName(Major major_key, bool allow_unknown_keys);
 
+  explicit CodeStub(Isolate* isolate) : isolate_(isolate) { }
   virtual ~CodeStub() {}
 
   static void GenerateStubsAheadOfTime(Isolate* isolate);
@@ -209,6 +210,8 @@ class CodeStub BASE_EMBEDDED {
   // Returns a name for logging/debugging purposes.
   SmartArrayPointer<const char> GetName();
 
+  Isolate* isolate() const { return isolate_; }
+
  protected:
   static bool CanUseFPRegisters();
 
@@ -265,11 +268,15 @@ class CodeStub BASE_EMBEDDED {
       kStubMajorKeyBits, kStubMinorKeyBits> {};  // NOLINT
 
   friend class BreakPointIterator;
+
+  Isolate* isolate_;
 };
 
 
 class PlatformCodeStub : public CodeStub {
  public:
+  explicit PlatformCodeStub(Isolate* isolate) : CodeStub(isolate) { }
+
   // Retrieve the code for the stub. Generate the code if needed.
   virtual Handle<Code> GenerateCode(Isolate* isolate);
 
@@ -385,7 +392,8 @@ class HydrogenCodeStub : public CodeStub {
     INITIALIZED
   };
 
-  explicit HydrogenCodeStub(InitializationState state = INITIALIZED) {
+  HydrogenCodeStub(Isolate* isolate, InitializationState state = INITIALIZED)
+      : CodeStub(isolate) {
     is_uninitialized_ = (state == UNINITIALIZED);
   }
 
@@ -493,7 +501,7 @@ class NopRuntimeCallHelper : public RuntimeCallHelper {
 
 class ToNumberStub: public HydrogenCodeStub {
  public:
-  ToNumberStub() { }
+  explicit ToNumberStub(Isolate* isolate) : HydrogenCodeStub(isolate) { }
 
   virtual Handle<Code> GenerateCode(Isolate* isolate);
 
@@ -502,7 +510,7 @@ class ToNumberStub: public HydrogenCodeStub {
       CodeStubInterfaceDescriptor* descriptor);
 
   static void InstallDescriptors(Isolate* isolate) {
-    ToNumberStub stub;
+    ToNumberStub stub(isolate);
     stub.InitializeInterfaceDescriptor(
         isolate,
         isolate->code_stub_interface_descriptor(CodeStub::ToNumber));
@@ -516,7 +524,7 @@ class ToNumberStub: public HydrogenCodeStub {
 
 class NumberToStringStub V8_FINAL : public HydrogenCodeStub {
  public:
-  NumberToStringStub() {}
+  explicit NumberToStringStub(Isolate* isolate) : HydrogenCodeStub(isolate) {}
 
   virtual Handle<Code> GenerateCode(Isolate* isolate) V8_OVERRIDE;
 
@@ -537,9 +545,12 @@ class NumberToStringStub V8_FINAL : public HydrogenCodeStub {
 
 class FastNewClosureStub : public HydrogenCodeStub {
  public:
-  explicit FastNewClosureStub(StrictMode strict_mode, bool is_generator)
-    : strict_mode_(strict_mode),
-      is_generator_(is_generator) { }
+  FastNewClosureStub(Isolate* isolate,
+                     StrictMode strict_mode,
+                     bool is_generator)
+      : HydrogenCodeStub(isolate),
+        strict_mode_(strict_mode),
+        is_generator_(is_generator) { }
 
   virtual Handle<Code> GenerateCode(Isolate* isolate);
 
@@ -571,7 +582,8 @@ class FastNewContextStub V8_FINAL : public HydrogenCodeStub {
  public:
   static const int kMaximumSlots = 64;
 
-  explicit FastNewContextStub(int slots) : slots_(slots) {
+  FastNewContextStub(Isolate* isolate, int slots)
+      : HydrogenCodeStub(isolate), slots_(slots) {
     ASSERT(slots_ > 0 && slots_ <= kMaximumSlots);
   }
 
@@ -610,10 +622,12 @@ class FastCloneShallowArrayStub : public HydrogenCodeStub {
 
   static const int kFastCloneModeCount = LAST_CLONE_MODE + 1;
 
-  FastCloneShallowArrayStub(Mode mode,
+  FastCloneShallowArrayStub(Isolate* isolate,
+                            Mode mode,
                             AllocationSiteMode allocation_site_mode,
                             int length)
-      : mode_(mode),
+      : HydrogenCodeStub(isolate),
+        mode_(mode),
         allocation_site_mode_(allocation_site_mode),
         length_((mode == COPY_ON_WRITE_ELEMENTS) ? 0 : length) {
     ASSERT_GE(length_, 0);
@@ -674,7 +688,8 @@ class FastCloneShallowObjectStub : public HydrogenCodeStub {
   // Maximum number of properties in copied object.
   static const int kMaximumClonedProperties = 6;
 
-  explicit FastCloneShallowObjectStub(int length) : length_(length) {
+  FastCloneShallowObjectStub(Isolate* isolate, int length)
+      : HydrogenCodeStub(isolate), length_(length) {
     ASSERT_GE(length_, 0);
     ASSERT_LE(length_, kMaximumClonedProperties);
   }
@@ -699,7 +714,8 @@ class FastCloneShallowObjectStub : public HydrogenCodeStub {
 
 class CreateAllocationSiteStub : public HydrogenCodeStub {
  public:
-  explicit CreateAllocationSiteStub() { }
+  explicit CreateAllocationSiteStub(Isolate* isolate)
+      : HydrogenCodeStub(isolate) { }
 
   virtual Handle<Code> GenerateCode(Isolate* isolate);
 
@@ -726,7 +742,8 @@ class InstanceofStub: public PlatformCodeStub {
     kReturnTrueFalseObject = 1 << 2
   };
 
-  explicit InstanceofStub(Flags flags) : flags_(flags) { }
+  InstanceofStub(Isolate* isolate, Flags flags)
+      : PlatformCodeStub(isolate), flags_(flags) { }
 
   static Register left();
   static Register right();
@@ -800,8 +817,8 @@ class MathPowStub: public PlatformCodeStub {
  public:
   enum ExponentType { INTEGER, DOUBLE, TAGGED, ON_STACK };
 
-  explicit MathPowStub(ExponentType exponent_type)
-      : exponent_type_(exponent_type) { }
+  MathPowStub(Isolate* isolate, ExponentType exponent_type)
+      : PlatformCodeStub(isolate), exponent_type_(exponent_type) { }
   virtual void Generate(MacroAssembler* masm);
 
  private:
@@ -814,7 +831,8 @@ class MathPowStub: public PlatformCodeStub {
 
 class ICStub: public PlatformCodeStub {
  public:
-  explicit ICStub(Code::Kind kind) : kind_(kind) { }
+  ICStub(Isolate* isolate, Code::Kind kind)
+      : PlatformCodeStub(isolate), kind_(kind) { }
   virtual Code::Kind GetCodeKind() const { return kind_; }
   virtual InlineCacheState GetICState() { return MONOMORPHIC; }
 
@@ -840,7 +858,8 @@ class ICStub: public PlatformCodeStub {
 
 class FunctionPrototypeStub: public ICStub {
  public:
-  explicit FunctionPrototypeStub(Code::Kind kind) : ICStub(kind) { }
+  FunctionPrototypeStub(Isolate* isolate, Code::Kind kind)
+      : ICStub(isolate, kind) { }
   virtual void Generate(MacroAssembler* masm);
 
  private:
@@ -850,8 +869,8 @@ class FunctionPrototypeStub: public ICStub {
 
 class StoreICStub: public ICStub {
  public:
-  StoreICStub(Code::Kind kind, StrictMode strict_mode)
-      : ICStub(kind), strict_mode_(strict_mode) { }
+  StoreICStub(Isolate* isolate, Code::Kind kind, StrictMode strict_mode)
+      : ICStub(isolate, kind), strict_mode_(strict_mode) { }
 
  protected:
   virtual ExtraICState GetExtraICState() {
@@ -871,6 +890,7 @@ class StoreICStub: public ICStub {
 
 class HICStub: public HydrogenCodeStub {
  public:
+  explicit HICStub(Isolate* isolate) : HydrogenCodeStub(isolate) { }
   virtual Code::Kind GetCodeKind() const { return kind(); }
   virtual InlineCacheState GetICState() { return MONOMORPHIC; }
 
@@ -886,7 +906,7 @@ class HandlerStub: public HICStub {
   virtual ExtraICState GetExtraICState() { return kind(); }
 
  protected:
-  HandlerStub() : HICStub() { }
+  explicit HandlerStub(Isolate* isolate) : HICStub(isolate) { }
   virtual int NotMissMinorKey() { return bit_field_; }
   int bit_field_;
 };
@@ -894,7 +914,10 @@ class HandlerStub: public HICStub {
 
 class LoadFieldStub: public HandlerStub {
  public:
-  LoadFieldStub(bool inobject, int index, Representation representation) {
+  LoadFieldStub(Isolate* isolate,
+                bool inobject,
+                int index, Representation representation)
+      : HandlerStub(isolate) {
     Initialize(Code::LOAD_IC, inobject, index, representation);
   }
 
@@ -931,7 +954,7 @@ class LoadFieldStub: public HandlerStub {
   virtual Code::StubType GetStubType() { return Code::FAST; }
 
  protected:
-  LoadFieldStub() : HandlerStub() { }
+  explicit LoadFieldStub(Isolate* isolate) : HandlerStub(isolate) { }
 
   void Initialize(Code::Kind kind,
                   bool inobject,
@@ -954,7 +977,7 @@ class LoadFieldStub: public HandlerStub {
 
 class StringLengthStub: public HandlerStub {
  public:
-  explicit StringLengthStub() : HandlerStub() {
+  explicit StringLengthStub(Isolate* isolate) : HandlerStub(isolate) {
     Initialize(Code::LOAD_IC);
   }
   virtual Handle<Code> GenerateCode(Isolate* isolate);
@@ -978,7 +1001,7 @@ class StringLengthStub: public HandlerStub {
 
 class KeyedStringLengthStub: public StringLengthStub {
  public:
-  explicit KeyedStringLengthStub() : StringLengthStub() {
+  explicit KeyedStringLengthStub(Isolate* isolate) : StringLengthStub(isolate) {
     Initialize(Code::KEYED_LOAD_IC);
   }
   virtual void InitializeInterfaceDescriptor(
@@ -992,7 +1015,8 @@ class KeyedStringLengthStub: public StringLengthStub {
 
 class StoreGlobalStub : public HandlerStub {
  public:
-  explicit StoreGlobalStub(bool is_constant, bool check_global) {
+  StoreGlobalStub(Isolate* isolate, bool is_constant, bool check_global)
+      : HandlerStub(isolate) {
     bit_field_ = IsConstantBits::encode(is_constant) |
         CheckGlobalBits::encode(check_global);
   }
@@ -1055,9 +1079,10 @@ class StoreGlobalStub : public HandlerStub {
 
 class CallApiFunctionStub : public PlatformCodeStub {
  public:
-  CallApiFunctionStub(bool is_store,
+  CallApiFunctionStub(Isolate* isolate,
+                      bool is_store,
                       bool call_data_undefined,
-                      int argc) {
+                      int argc) : PlatformCodeStub(isolate) {
     bit_field_ =
         IsStoreBits::encode(is_store) |
         CallDataUndefinedBits::encode(call_data_undefined) |
@@ -1082,7 +1107,7 @@ class CallApiFunctionStub : public PlatformCodeStub {
 
 class CallApiGetterStub : public PlatformCodeStub {
  public:
-  CallApiGetterStub() {}
+  explicit CallApiGetterStub(Isolate* isolate) : PlatformCodeStub(isolate) {}
 
  private:
   virtual void Generate(MacroAssembler* masm) V8_OVERRIDE;
@@ -1095,8 +1120,10 @@ class CallApiGetterStub : public PlatformCodeStub {
 
 class KeyedLoadFieldStub: public LoadFieldStub {
  public:
-  KeyedLoadFieldStub(bool inobject, int index, Representation representation)
-      : LoadFieldStub() {
+  KeyedLoadFieldStub(Isolate* isolate,
+                     bool inobject,
+                     int index, Representation representation)
+      : LoadFieldStub(isolate) {
     Initialize(Code::KEYED_LOAD_IC, inobject, index, representation);
   }
 
@@ -1111,10 +1138,11 @@ class KeyedLoadFieldStub: public LoadFieldStub {
 
 class BinaryOpICStub : public HydrogenCodeStub {
  public:
-  BinaryOpICStub(Token::Value op, OverwriteMode mode)
-      : HydrogenCodeStub(UNINITIALIZED), state_(op, mode) {}
+  BinaryOpICStub(Isolate* isolate, Token::Value op, OverwriteMode mode)
+      : HydrogenCodeStub(isolate, UNINITIALIZED), state_(op, mode) {}
 
-  explicit BinaryOpICStub(const BinaryOpIC::State& state) : state_(state) {}
+  BinaryOpICStub(Isolate* isolate, const BinaryOpIC::State& state)
+      : HydrogenCodeStub(isolate), state_(state) {}
 
   static void GenerateAheadOfTime(Isolate* isolate);
 
@@ -1168,8 +1196,9 @@ class BinaryOpICStub : public HydrogenCodeStub {
 // call support for stubs in Hydrogen.
 class BinaryOpICWithAllocationSiteStub V8_FINAL : public PlatformCodeStub {
  public:
-  explicit BinaryOpICWithAllocationSiteStub(const BinaryOpIC::State& state)
-      : state_(state) {}
+  BinaryOpICWithAllocationSiteStub(Isolate* isolate,
+                                   const BinaryOpIC::State& state)
+      : PlatformCodeStub(isolate), state_(state) {}
 
   static void GenerateAheadOfTime(Isolate* isolate);
 
@@ -1215,11 +1244,14 @@ class BinaryOpICWithAllocationSiteStub V8_FINAL : public PlatformCodeStub {
 
 class BinaryOpWithAllocationSiteStub V8_FINAL : public BinaryOpICStub {
  public:
-  BinaryOpWithAllocationSiteStub(Token::Value op, OverwriteMode mode)
-      : BinaryOpICStub(op, mode) {}
+  BinaryOpWithAllocationSiteStub(Isolate* isolate,
+                                 Token::Value op,
+                                 OverwriteMode mode)
+      : BinaryOpICStub(isolate, op, mode) {}
 
-  explicit BinaryOpWithAllocationSiteStub(const BinaryOpIC::State& state)
-      : BinaryOpICStub(state) {}
+  BinaryOpWithAllocationSiteStub(Isolate* isolate,
+                                 const BinaryOpIC::State& state)
+      : BinaryOpICStub(isolate, state) {}
 
   virtual void InitializeInterfaceDescriptor(
       Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) V8_OVERRIDE;
@@ -1257,8 +1289,11 @@ enum StringAddFlags {
 
 class StringAddStub V8_FINAL : public HydrogenCodeStub {
  public:
-  StringAddStub(StringAddFlags flags, PretenureFlag pretenure_flag)
-      : bit_field_(StringAddFlagsBits::encode(flags) |
+  StringAddStub(Isolate* isolate,
+                StringAddFlags flags,
+                PretenureFlag pretenure_flag)
+      : HydrogenCodeStub(isolate),
+        bit_field_(StringAddFlagsBits::encode(flags) |
                    PretenureFlagBits::encode(pretenure_flag)) {}
 
   StringAddFlags flags() const {
@@ -1301,11 +1336,13 @@ class StringAddStub V8_FINAL : public HydrogenCodeStub {
 
 class ICCompareStub: public PlatformCodeStub {
  public:
-  ICCompareStub(Token::Value op,
+  ICCompareStub(Isolate* isolate,
+                Token::Value op,
                 CompareIC::State left,
                 CompareIC::State right,
                 CompareIC::State handler)
-      : op_(op),
+      : PlatformCodeStub(isolate),
+        op_(op),
         left_(left),
         right_(right),
         state_(handler) {
@@ -1369,18 +1406,20 @@ class CompareNilICStub : public HydrogenCodeStub  {
   Type* GetType(Zone* zone, Handle<Map> map = Handle<Map>());
   Type* GetInputType(Zone* zone, Handle<Map> map);
 
-  explicit CompareNilICStub(NilValue nil) : nil_value_(nil) { }
+  CompareNilICStub(Isolate* isolate, NilValue nil)
+      : HydrogenCodeStub(isolate), nil_value_(nil) { }
 
-  CompareNilICStub(ExtraICState ic_state,
+  CompareNilICStub(Isolate* isolate,
+                   ExtraICState ic_state,
                    InitializationState init_state = INITIALIZED)
-      : HydrogenCodeStub(init_state),
+      : HydrogenCodeStub(isolate, init_state),
         nil_value_(NilValueField::decode(ic_state)),
         state_(State(TypesField::decode(ic_state))) {
       }
 
   static Handle<Code> GetUninitialized(Isolate* isolate,
                                        NilValue nil) {
-    return CompareNilICStub(nil, UNINITIALIZED).GetCode(isolate);
+    return CompareNilICStub(isolate, nil, UNINITIALIZED).GetCode(isolate);
   }
 
   virtual void InitializeInterfaceDescriptor(
@@ -1388,7 +1427,7 @@ class CompareNilICStub : public HydrogenCodeStub  {
       CodeStubInterfaceDescriptor* descriptor);
 
   static void InstallDescriptors(Isolate* isolate) {
-    CompareNilICStub compare_stub(kNullValue, UNINITIALIZED);
+    CompareNilICStub compare_stub(isolate, kNullValue, UNINITIALIZED);
     compare_stub.InitializeInterfaceDescriptor(
         isolate,
         isolate->code_stub_interface_descriptor(CodeStub::CompareNilIC));
@@ -1446,8 +1485,10 @@ class CompareNilICStub : public HydrogenCodeStub  {
     void Print(StringStream* stream) const;
   };
 
-  CompareNilICStub(NilValue nil, InitializationState init_state)
-      : HydrogenCodeStub(init_state), nil_value_(nil) { }
+  CompareNilICStub(Isolate* isolate,
+                   NilValue nil,
+                   InitializationState init_state)
+      : HydrogenCodeStub(isolate, init_state), nil_value_(nil) { }
 
   class NilValueField : public BitField<NilValue, 0, 1> {};
   class TypesField    : public BitField<byte,     1, NUMBER_OF_TYPES> {};
@@ -1464,9 +1505,12 @@ class CompareNilICStub : public HydrogenCodeStub  {
 
 class CEntryStub : public PlatformCodeStub {
  public:
-  explicit CEntryStub(int result_size,
-                      SaveFPRegsMode save_doubles = kDontSaveFPRegs)
-      : result_size_(result_size), save_doubles_(save_doubles) { }
+  CEntryStub(Isolate* isolate,
+             int result_size,
+             SaveFPRegsMode save_doubles = kDontSaveFPRegs)
+      : PlatformCodeStub(isolate),
+        result_size_(result_size),
+        save_doubles_(save_doubles) { }
 
   void Generate(MacroAssembler* masm);
 
@@ -1496,7 +1540,7 @@ class CEntryStub : public PlatformCodeStub {
 
 class JSEntryStub : public PlatformCodeStub {
  public:
-  JSEntryStub() { }
+  explicit JSEntryStub(Isolate* isolate) : PlatformCodeStub(isolate) { }
 
   void Generate(MacroAssembler* masm) { GenerateBody(masm, false); }
 
@@ -1515,7 +1559,7 @@ class JSEntryStub : public PlatformCodeStub {
 
 class JSConstructEntryStub : public JSEntryStub {
  public:
-  JSConstructEntryStub() { }
+  explicit JSConstructEntryStub(Isolate* isolate) : JSEntryStub(isolate) { }
 
   void Generate(MacroAssembler* masm) { GenerateBody(masm, true); }
 
@@ -1537,7 +1581,8 @@ class ArgumentsAccessStub: public PlatformCodeStub {
     NEW_STRICT
   };
 
-  explicit ArgumentsAccessStub(Type type) : type_(type) { }
+  ArgumentsAccessStub(Isolate* isolate, Type type)
+      : PlatformCodeStub(isolate), type_(type) { }
 
  private:
   Type type_;
@@ -1557,7 +1602,7 @@ class ArgumentsAccessStub: public PlatformCodeStub {
 
 class RegExpExecStub: public PlatformCodeStub {
  public:
-  RegExpExecStub() { }
+  explicit RegExpExecStub(Isolate* isolate) : PlatformCodeStub(isolate) { }
 
  private:
   Major MajorKey() { return RegExpExec; }
@@ -1569,7 +1614,8 @@ class RegExpExecStub: public PlatformCodeStub {
 
 class RegExpConstructResultStub V8_FINAL : public HydrogenCodeStub {
  public:
-  RegExpConstructResultStub() { }
+  explicit RegExpConstructResultStub(Isolate* isolate)
+      : HydrogenCodeStub(isolate) { }
 
   virtual Handle<Code> GenerateCode(Isolate* isolate) V8_OVERRIDE;
 
@@ -1594,8 +1640,8 @@ class RegExpConstructResultStub V8_FINAL : public HydrogenCodeStub {
 
 class CallFunctionStub: public PlatformCodeStub {
  public:
-  CallFunctionStub(int argc, CallFunctionFlags flags)
-      : argc_(argc), flags_(flags) { }
+  CallFunctionStub(Isolate* isolate, int argc, CallFunctionFlags flags)
+      : PlatformCodeStub(isolate), argc_(argc), flags_(flags) { }
 
   void Generate(MacroAssembler* masm);
 
@@ -1639,7 +1685,8 @@ class CallFunctionStub: public PlatformCodeStub {
 
 class CallConstructStub: public PlatformCodeStub {
  public:
-  explicit CallConstructStub(CallFunctionFlags flags) : flags_(flags) {}
+  CallConstructStub(Isolate* isolate, CallFunctionFlags flags)
+      : PlatformCodeStub(isolate), flags_(flags) {}
 
   void Generate(MacroAssembler* masm);
 
@@ -1841,7 +1888,8 @@ class StringCharAtGenerator {
 
 class KeyedLoadDictionaryElementStub : public HydrogenCodeStub {
  public:
-  KeyedLoadDictionaryElementStub() {}
+  explicit KeyedLoadDictionaryElementStub(Isolate* isolate)
+      : HydrogenCodeStub(isolate) {}
 
   virtual Handle<Code> GenerateCode(Isolate* isolate) V8_OVERRIDE;
 
@@ -1859,7 +1907,8 @@ class KeyedLoadDictionaryElementStub : public HydrogenCodeStub {
 
 class KeyedLoadDictionaryElementPlatformStub : public PlatformCodeStub {
  public:
-  KeyedLoadDictionaryElementPlatformStub() {}
+  explicit KeyedLoadDictionaryElementPlatformStub(Isolate* isolate)
+      : PlatformCodeStub(isolate) {}
 
   void Generate(MacroAssembler* masm);
 
@@ -1873,11 +1922,13 @@ class KeyedLoadDictionaryElementPlatformStub : public PlatformCodeStub {
 
 class DoubleToIStub : public PlatformCodeStub {
  public:
-  DoubleToIStub(Register source,
+  DoubleToIStub(Isolate* isolate,
+                Register source,
                 Register destination,
                 int offset,
                 bool is_truncating,
-                bool skip_fastpath = false) : bit_field_(0) {
+                bool skip_fastpath = false)
+      : PlatformCodeStub(isolate), bit_field_(0) {
     bit_field_ = SourceRegisterBits::encode(source.code()) |
       DestinationRegisterBits::encode(destination.code()) |
       OffsetBits::encode(offset) |
@@ -1944,7 +1995,10 @@ class DoubleToIStub : public PlatformCodeStub {
 
 class KeyedLoadFastElementStub : public HydrogenCodeStub {
  public:
-  KeyedLoadFastElementStub(bool is_js_array, ElementsKind elements_kind) {
+  KeyedLoadFastElementStub(Isolate* isolate,
+                           bool is_js_array,
+                           ElementsKind elements_kind)
+      : HydrogenCodeStub(isolate) {
     bit_field_ = ElementsKindBits::encode(elements_kind) |
         IsJSArrayBits::encode(is_js_array);
   }
@@ -1977,9 +2031,11 @@ class KeyedLoadFastElementStub : public HydrogenCodeStub {
 
 class KeyedStoreFastElementStub : public HydrogenCodeStub {
  public:
-  KeyedStoreFastElementStub(bool is_js_array,
+  KeyedStoreFastElementStub(Isolate* isolate,
+                            bool is_js_array,
                             ElementsKind elements_kind,
-                            KeyedAccessStoreMode mode) {
+                            KeyedAccessStoreMode mode)
+      : HydrogenCodeStub(isolate) {
     bit_field_ = ElementsKindBits::encode(elements_kind) |
         IsJSArrayBits::encode(is_js_array) |
         StoreModeBits::encode(mode);
@@ -2018,9 +2074,10 @@ class KeyedStoreFastElementStub : public HydrogenCodeStub {
 
 class TransitionElementsKindStub : public HydrogenCodeStub {
  public:
-  TransitionElementsKindStub(ElementsKind from_kind,
+  TransitionElementsKindStub(Isolate* isolate,
+                             ElementsKind from_kind,
                              ElementsKind to_kind,
-                             bool is_js_array) {
+                             bool is_js_array) : HydrogenCodeStub(isolate) {
     bit_field_ = FromKindBits::encode(from_kind) |
                  ToKindBits::encode(to_kind) |
                  IsJSArrayBits::encode(is_js_array);
@@ -2059,8 +2116,10 @@ class TransitionElementsKindStub : public HydrogenCodeStub {
 
 class ArrayConstructorStubBase : public HydrogenCodeStub {
  public:
-  ArrayConstructorStubBase(ElementsKind kind,
-                           AllocationSiteOverrideMode override_mode) {
+  ArrayConstructorStubBase(Isolate* isolate,
+                           ElementsKind kind,
+                           AllocationSiteOverrideMode override_mode)
+      : HydrogenCodeStub(isolate) {
     // It only makes sense to override local allocation site behavior
     // if there is a difference between the global allocation site policy
     // for an ElementsKind and the desired usage of the stub.
@@ -2106,9 +2165,10 @@ class ArrayConstructorStubBase : public HydrogenCodeStub {
 class ArrayNoArgumentConstructorStub : public ArrayConstructorStubBase {
  public:
   ArrayNoArgumentConstructorStub(
+      Isolate* isolate,
       ElementsKind kind,
       AllocationSiteOverrideMode override_mode = DONT_OVERRIDE)
-      : ArrayConstructorStubBase(kind, override_mode) {
+      : ArrayConstructorStubBase(isolate, kind, override_mode) {
   }
 
   virtual Handle<Code> GenerateCode(Isolate* isolate);
@@ -2131,9 +2191,10 @@ class ArrayNoArgumentConstructorStub : public ArrayConstructorStubBase {
 class ArraySingleArgumentConstructorStub : public ArrayConstructorStubBase {
  public:
   ArraySingleArgumentConstructorStub(
+      Isolate* isolate,
       ElementsKind kind,
       AllocationSiteOverrideMode override_mode = DONT_OVERRIDE)
-      : ArrayConstructorStubBase(kind, override_mode) {
+      : ArrayConstructorStubBase(isolate, kind, override_mode) {
   }
 
   virtual Handle<Code> GenerateCode(Isolate* isolate);
@@ -2156,9 +2217,10 @@ class ArraySingleArgumentConstructorStub : public ArrayConstructorStubBase {
 class ArrayNArgumentsConstructorStub : public ArrayConstructorStubBase {
  public:
   ArrayNArgumentsConstructorStub(
+      Isolate* isolate,
       ElementsKind kind,
       AllocationSiteOverrideMode override_mode = DONT_OVERRIDE)
-      : ArrayConstructorStubBase(kind, override_mode) {
+      : ArrayConstructorStubBase(isolate, kind, override_mode) {
   }
 
   virtual Handle<Code> GenerateCode(Isolate* isolate);
@@ -2180,7 +2242,8 @@ class ArrayNArgumentsConstructorStub : public ArrayConstructorStubBase {
 
 class InternalArrayConstructorStubBase : public HydrogenCodeStub {
  public:
-  explicit InternalArrayConstructorStubBase(ElementsKind kind) {
+  InternalArrayConstructorStubBase(Isolate* isolate, ElementsKind kind)
+      : HydrogenCodeStub(isolate) {
     kind_ = kind;
   }
 
@@ -2204,8 +2267,9 @@ class InternalArrayConstructorStubBase : public HydrogenCodeStub {
 class InternalArrayNoArgumentConstructorStub : public
     InternalArrayConstructorStubBase {
  public:
-  explicit InternalArrayNoArgumentConstructorStub(ElementsKind kind)
-      : InternalArrayConstructorStubBase(kind) { }
+  InternalArrayNoArgumentConstructorStub(Isolate* isolate,
+                                         ElementsKind kind)
+      : InternalArrayConstructorStubBase(isolate, kind) { }
 
   virtual Handle<Code> GenerateCode(Isolate* isolate);
 
@@ -2223,8 +2287,9 @@ class InternalArrayNoArgumentConstructorStub : public
 class InternalArraySingleArgumentConstructorStub : public
     InternalArrayConstructorStubBase {
  public:
-  explicit InternalArraySingleArgumentConstructorStub(ElementsKind kind)
-      : InternalArrayConstructorStubBase(kind) { }
+  InternalArraySingleArgumentConstructorStub(Isolate* isolate,
+                                             ElementsKind kind)
+      : InternalArrayConstructorStubBase(isolate, kind) { }
 
   virtual Handle<Code> GenerateCode(Isolate* isolate);
 
@@ -2242,8 +2307,8 @@ class InternalArraySingleArgumentConstructorStub : public
 class InternalArrayNArgumentsConstructorStub : public
     InternalArrayConstructorStubBase {
  public:
-  explicit InternalArrayNArgumentsConstructorStub(ElementsKind kind)
-      : InternalArrayConstructorStubBase(kind) { }
+  InternalArrayNArgumentsConstructorStub(Isolate* isolate, ElementsKind kind)
+      : InternalArrayConstructorStubBase(isolate, kind) { }
 
   virtual Handle<Code> GenerateCode(Isolate* isolate);
 
@@ -2260,10 +2325,12 @@ class InternalArrayNArgumentsConstructorStub : public
 
 class KeyedStoreElementStub : public PlatformCodeStub {
  public:
-  KeyedStoreElementStub(bool is_js_array,
+  KeyedStoreElementStub(Isolate* isolate,
+                        bool is_js_array,
                         ElementsKind elements_kind,
                         KeyedAccessStoreMode store_mode)
-      : is_js_array_(is_js_array),
+      : PlatformCodeStub(isolate),
+        is_js_array_(is_js_array),
         elements_kind_(elements_kind),
         store_mode_(store_mode),
         fp_registers_(CanUseFPRegisters()) { }
@@ -2326,10 +2393,10 @@ class ToBooleanStub: public HydrogenCodeStub {
     static Types Generic() { return Types((1 << NUMBER_OF_TYPES) - 1); }
   };
 
-  explicit ToBooleanStub(Types types = Types())
-      : types_(types) { }
-  explicit ToBooleanStub(ExtraICState state)
-      : types_(static_cast<byte>(state)) { }
+  ToBooleanStub(Isolate* isolate, Types types = Types())
+      : HydrogenCodeStub(isolate), types_(types) { }
+  ToBooleanStub(Isolate* isolate, ExtraICState state)
+      : HydrogenCodeStub(isolate), types_(static_cast<byte>(state)) { }
 
   bool UpdateStatus(Handle<Object> object);
   Types GetTypes() { return types_; }
@@ -2345,14 +2412,14 @@ class ToBooleanStub: public HydrogenCodeStub {
   virtual bool SometimesSetsUpAFrame() { return false; }
 
   static void InstallDescriptors(Isolate* isolate) {
-    ToBooleanStub stub;
+    ToBooleanStub stub(isolate);
     stub.InitializeInterfaceDescriptor(
         isolate,
         isolate->code_stub_interface_descriptor(CodeStub::ToBoolean));
   }
 
   static Handle<Code> GetUninitialized(Isolate* isolate) {
-    return ToBooleanStub(UNINITIALIZED).GetCode(isolate);
+    return ToBooleanStub(isolate, UNINITIALIZED).GetCode(isolate);
   }
 
   virtual ExtraICState GetExtraICState() {
@@ -2371,8 +2438,8 @@ class ToBooleanStub: public HydrogenCodeStub {
   Major MajorKey() { return ToBoolean; }
   int NotMissMinorKey() { return GetExtraICState(); }
 
-  explicit ToBooleanStub(InitializationState init_state) :
-    HydrogenCodeStub(init_state) {}
+  ToBooleanStub(Isolate* isolate, InitializationState init_state) :
+      HydrogenCodeStub(isolate, init_state) {}
 
   Types types_;
 };
@@ -2380,11 +2447,13 @@ class ToBooleanStub: public HydrogenCodeStub {
 
 class ElementsTransitionAndStoreStub : public HydrogenCodeStub {
  public:
-  ElementsTransitionAndStoreStub(ElementsKind from_kind,
+  ElementsTransitionAndStoreStub(Isolate* isolate,
+                                 ElementsKind from_kind,
                                  ElementsKind to_kind,
                                  bool is_jsarray,
                                  KeyedAccessStoreMode store_mode)
-      : from_kind_(from_kind),
+      : HydrogenCodeStub(isolate),
+        from_kind_(from_kind),
         to_kind_(to_kind),
         is_jsarray_(is_jsarray),
         store_mode_(store_mode) {}
@@ -2425,8 +2494,8 @@ class ElementsTransitionAndStoreStub : public HydrogenCodeStub {
 
 class StoreArrayLiteralElementStub : public PlatformCodeStub {
  public:
-  StoreArrayLiteralElementStub()
-        : fp_registers_(CanUseFPRegisters()) { }
+  explicit StoreArrayLiteralElementStub(Isolate* isolate)
+      : PlatformCodeStub(isolate), fp_registers_(CanUseFPRegisters()) { }
 
  private:
   class FPRegisters: public BitField<bool,                0, 1> {};
@@ -2444,8 +2513,10 @@ class StoreArrayLiteralElementStub : public PlatformCodeStub {
 
 class StubFailureTrampolineStub : public PlatformCodeStub {
  public:
-  explicit StubFailureTrampolineStub(StubFunctionMode function_mode)
-      : fp_registers_(CanUseFPRegisters()), function_mode_(function_mode) {}
+  StubFailureTrampolineStub(Isolate* isolate, StubFunctionMode function_mode)
+      : PlatformCodeStub(isolate),
+        fp_registers_(CanUseFPRegisters()),
+        function_mode_(function_mode) {}
 
   static void GenerateAheadOfTime(Isolate* isolate);
 
@@ -2470,7 +2541,7 @@ class StubFailureTrampolineStub : public PlatformCodeStub {
 
 class ProfileEntryHookStub : public PlatformCodeStub {
  public:
-  explicit ProfileEntryHookStub() {}
+  explicit ProfileEntryHookStub(Isolate* isolate) : PlatformCodeStub(isolate) {}
 
   // The profile entry hook function is not allowed to cause a GC.
   virtual bool SometimesSetsUpAFrame() { return false; }
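
Taken together, the header hunks above establish the new convention: every
concrete stub forwards an Isolate* to its base-class constructor, and CodeStub
stores it and exposes it through the isolate() accessor. A minimal sketch of a
stub under this convention follows; the stub name is made up for illustration
and is not part of this change, and the MajorKey()/MinorKey() plumbing is
unchanged and therefore omitted.

class ExampleStub : public PlatformCodeStub {  // hypothetical stub
 public:
  explicit ExampleStub(Isolate* isolate) : PlatformCodeStub(isolate) {}

  void Generate(MacroAssembler* masm) {
    // The body can reach counters, the factory, etc. through the stub's
    // own isolate() instead of going through masm->isolate().
    masm->IncrementCounter(isolate()->counters()->code_stubs(), 1);
  }

  // MajorKey() and MinorKey() are defined exactly as before; that part of
  // the CodeStub interface is not touched by this CL.
};
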
index 5517deef4d2dcb3802f3e08601ea3e7814da8f7a..051a31f1b725853814cbb2b31eeb9fab80360576 100644 (file)
@@ -1780,7 +1780,8 @@ void Deoptimizer::DoComputeCompiledStubFrame(TranslationIterator* iterator,
   // Compute this frame's PC, state, and continuation.
   Code* trampoline = NULL;
   StubFunctionMode function_mode = descriptor->function_mode_;
-  StubFailureTrampolineStub(function_mode).FindCodeInCache(&trampoline,
+  StubFailureTrampolineStub(isolate_,
+                            function_mode).FindCodeInCache(&trampoline,
                                                            isolate_);
   ASSERT(trampoline != NULL);
   output_frame->SetPc(reinterpret_cast<intptr_t>(
index 73526d84fee732c75ae00129c26334d88c992936..571aa8d1e0009ed131b4ca95ea94a66adb6da838 100644 (file)
@@ -1403,13 +1403,13 @@ Address StubFailureTrampolineFrame::GetCallerStackPointer() const {
 
 Code* StubFailureTrampolineFrame::unchecked_code() const {
   Code* trampoline;
-  StubFailureTrampolineStub(NOT_JS_FUNCTION_STUB_MODE).
+  StubFailureTrampolineStub(isolate(), NOT_JS_FUNCTION_STUB_MODE).
       FindCodeInCache(&trampoline, isolate());
   if (trampoline->contains(pc())) {
     return trampoline;
   }
 
-  StubFailureTrampolineStub(JS_FUNCTION_STUB_MODE).
+  StubFailureTrampolineStub(isolate(), JS_FUNCTION_STUB_MODE).
       FindCodeInCache(&trampoline, isolate());
   if (trampoline->contains(pc())) {
     return trampoline;
index 858dbf4343ef64b74cd2324c310cfe04e7392928..7f23886ec52b889ed205ffa3d79d9588b701954b 100644 (file)
@@ -2779,13 +2779,13 @@ bool Heap::CreateApiObjects() {
 
 
 void Heap::CreateJSEntryStub() {
-  JSEntryStub stub;
+  JSEntryStub stub(isolate());
   set_js_entry_code(*stub.GetCode(isolate()));
 }
 
 
 void Heap::CreateJSConstructEntryStub() {
-  JSConstructEntryStub stub;
+  JSConstructEntryStub stub(isolate());
   set_js_construct_entry_code(*stub.GetCode(isolate()));
 }
 
index 331aac8f5fa3306415572f51b78b81198367fc17..b0c0946cd4ac9ed5187ab32c56df9c4835239f5e 100644 (file)
@@ -8008,7 +8008,7 @@ bool HOptimizedGraphBuilder::TryInlineApiCall(Handle<JSFunction> function,
   CallInterfaceDescriptor* descriptor =
       isolate()->call_descriptor(Isolate::ApiFunctionCall);
 
-  CallApiFunctionStub stub(is_store, call_data_is_undefined, argc);
+  CallApiFunctionStub stub(isolate(), is_store, call_data_is_undefined, argc);
   Handle<Code> code = stub.GetCode(isolate());
   HConstant* code_value = Add<HConstant>(code);
 
index 12ab3b47f32b74ededd6c4fd8e874acee3fdb43f..efef4a2441b836df3074feea6e6b28917832a89b 100644 (file)
@@ -540,7 +540,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
     if (is_construct) {
       // No type feedback cell is available
       __ mov(ebx, masm->isolate()->factory()->undefined_value());
-      CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
+      CallConstructStub stub(masm->isolate(), NO_CALL_FUNCTION_FLAGS);
       __ CallStub(&stub);
     } else {
       ParameterCount actual(eax);
index 80f3195c6f286232c5fc97649d8760795fdfda3d..39cb3b833e5c82f717154b321fdca6083f39adb9 100644 (file)
@@ -482,10 +482,9 @@ void CallDescriptors::InitializeForIsolate(Isolate* isolate) {
 
 void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) {
   // Update the static counter each time a new code stub is generated.
-  Isolate* isolate = masm->isolate();
-  isolate->counters()->code_stubs()->Increment();
+  isolate()->counters()->code_stubs()->Increment();
 
-  CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate);
+  CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate());
   int param_count = descriptor->register_param_count_;
   {
     // Call the runtime system in a fresh internal frame.
@@ -522,9 +521,9 @@ void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
   AllowExternalCallThatCantCauseGC scope(masm);
   __ PrepareCallCFunction(argument_count, ecx);
   __ mov(Operand(esp, 0 * kPointerSize),
-         Immediate(ExternalReference::isolate_address(masm->isolate())));
+         Immediate(ExternalReference::isolate_address(isolate())));
   __ CallCFunction(
-      ExternalReference::store_buffer_overflow_function(masm->isolate()),
+      ExternalReference::store_buffer_overflow_function(isolate()),
       argument_count);
   if (save_doubles_ == kSaveFPRegs) {
     CpuFeatureScope scope(masm, SSE2);
@@ -781,7 +780,7 @@ void FloatingPointHelper::CheckFloatOperands(MacroAssembler* masm,
 
 void MathPowStub::Generate(MacroAssembler* masm) {
   CpuFeatureScope use_sse2(masm, SSE2);
-  Factory* factory = masm->isolate()->factory();
+  Factory* factory = isolate()->factory();
   const Register exponent = eax;
   const Register base = edx;
   const Register scratch = ecx;
@@ -1010,7 +1009,7 @@ void MathPowStub::Generate(MacroAssembler* masm) {
   __ Cvtsi2sd(double_exponent, exponent);
 
   // Returning or bailing out.
-  Counters* counters = masm->isolate()->counters();
+  Counters* counters = isolate()->counters();
   if (exponent_type_ == ON_STACK) {
     // The arguments are still on the stack.
     __ bind(&call_runtime);
@@ -1031,7 +1030,7 @@ void MathPowStub::Generate(MacroAssembler* masm) {
       __ movsd(Operand(esp, 0 * kDoubleSize), double_base);
       __ movsd(Operand(esp, 1 * kDoubleSize), double_exponent);
       __ CallCFunction(
-          ExternalReference::power_double_double_function(masm->isolate()), 4);
+          ExternalReference::power_double_double_function(isolate()), 4);
     }
     // Return value is in st(0) on ia32.
     // Store it into the (fixed) result register.
@@ -1056,7 +1055,7 @@ void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
   Label miss;
 
   if (kind() == Code::KEYED_LOAD_IC) {
-    __ cmp(ecx, Immediate(masm->isolate()->factory()->prototype_string()));
+    __ cmp(ecx, Immediate(isolate()->factory()->prototype_string()));
     __ j(not_equal, &miss);
   }
 
@@ -1152,8 +1151,6 @@ void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) {
 
 
 void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
-  Isolate* isolate = masm->isolate();
-
   // esp[0] : return address
   // esp[4] : number of parameters (tagged)
   // esp[8] : receiver displacement
@@ -1285,7 +1282,7 @@ void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
   __ j(zero, &skip_parameter_map);
 
   __ mov(FieldOperand(edi, FixedArray::kMapOffset),
-         Immediate(isolate->factory()->sloppy_arguments_elements_map()));
+         Immediate(isolate()->factory()->sloppy_arguments_elements_map()));
   __ lea(eax, Operand(ebx, reinterpret_cast<intptr_t>(Smi::FromInt(2))));
   __ mov(FieldOperand(edi, FixedArray::kLengthOffset), eax);
   __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 0 * kPointerSize), esi);
@@ -1306,7 +1303,7 @@ void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
   __ mov(ebx, Immediate(Smi::FromInt(Context::MIN_CONTEXT_SLOTS)));
   __ add(ebx, Operand(esp, 4 * kPointerSize));
   __ sub(ebx, eax);
-  __ mov(ecx, isolate->factory()->the_hole_value());
+  __ mov(ecx, isolate()->factory()->the_hole_value());
   __ mov(edx, edi);
   __ lea(edi, Operand(edi, eax, times_2, kParameterMapHeaderSize));
   // eax = loop variable (tagged)
@@ -1341,7 +1338,7 @@ void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
   // esp[16] = address of receiver argument
   // Copy arguments header and remaining slots (if there are any).
   __ mov(FieldOperand(edi, FixedArray::kMapOffset),
-         Immediate(isolate->factory()->fixed_array_map()));
+         Immediate(isolate()->factory()->fixed_array_map()));
   __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);
 
   Label arguments_loop, arguments_test;
@@ -1377,8 +1374,6 @@ void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
 
 
 void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
-  Isolate* isolate = masm->isolate();
-
   // esp[0] : return address
   // esp[4] : number of parameters
   // esp[8] : receiver displacement
@@ -1449,7 +1444,7 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
   __ lea(edi, Operand(eax, Heap::kStrictArgumentsObjectSize));
   __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);
   __ mov(FieldOperand(edi, FixedArray::kMapOffset),
-         Immediate(isolate->factory()->fixed_array_map()));
+         Immediate(isolate()->factory()->fixed_array_map()));
 
   __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);
   // Untag the length for the loop below.
@@ -1496,14 +1491,13 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
   static const int kJSRegExpOffset = 4 * kPointerSize;
 
   Label runtime;
-  Factory* factory = masm->isolate()->factory();
+  Factory* factory = isolate()->factory();
 
   // Ensure that a RegExp stack is allocated.
   ExternalReference address_of_regexp_stack_memory_address =
-      ExternalReference::address_of_regexp_stack_memory_address(
-          masm->isolate());
+      ExternalReference::address_of_regexp_stack_memory_address(isolate());
   ExternalReference address_of_regexp_stack_memory_size =
-      ExternalReference::address_of_regexp_stack_memory_size(masm->isolate());
+      ExternalReference::address_of_regexp_stack_memory_size(isolate());
   __ mov(ebx, Operand::StaticVariable(address_of_regexp_stack_memory_size));
   __ test(ebx, ebx);
   __ j(zero, &runtime);
@@ -1652,7 +1646,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
   // edx: code
   // ecx: encoding of subject string (1 if ASCII, 0 if two_byte);
   // All checks done. Now push arguments for native regexp code.
-  Counters* counters = masm->isolate()->counters();
+  Counters* counters = isolate()->counters();
   __ IncrementCounter(counters->regexp_entry_native(), 1);
 
   // Isolates: note we add an additional parameter here (isolate pointer).
@@ -1661,7 +1655,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
 
   // Argument 9: Pass current isolate address.
   __ mov(Operand(esp, 8 * kPointerSize),
-      Immediate(ExternalReference::isolate_address(masm->isolate())));
+      Immediate(ExternalReference::isolate_address(isolate())));
 
   // Argument 8: Indicate that this is a direct call from JavaScript.
   __ mov(Operand(esp, 7 * kPointerSize), Immediate(1));
@@ -1678,7 +1672,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
   // Argument 5: static offsets vector buffer.
   __ mov(Operand(esp, 4 * kPointerSize),
          Immediate(ExternalReference::address_of_static_offsets_vector(
-             masm->isolate())));
+             isolate())));
 
   // Argument 2: Previous index.
   __ SmiUntag(ebx);
@@ -1752,8 +1746,8 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
   // haven't created the exception yet. Handle that in the runtime system.
   // TODO(592): Rerunning the RegExp to get the stack overflow exception.
   ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
-                                      masm->isolate());
-  __ mov(edx, Immediate(masm->isolate()->factory()->the_hole_value()));
+                                      isolate());
+  __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
   __ mov(eax, Operand::StaticVariable(pending_exception));
   __ cmp(edx, eax);
   __ j(equal, &runtime);
@@ -1834,7 +1828,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
 
   // Get the static offsets vector filled by the native regexp code.
   ExternalReference address_of_static_offsets_vector =
-      ExternalReference::address_of_static_offsets_vector(masm->isolate());
+      ExternalReference::address_of_static_offsets_vector(isolate());
   __ mov(ecx, Immediate(address_of_static_offsets_vector));
 
   // ebx: last_match_info backing store (FixedArray)
@@ -2000,7 +1994,7 @@ void ICCompareStub::GenerateGeneric(MacroAssembler* masm) {
       // Check for undefined.  undefined OP undefined is false even though
       // undefined == undefined.
       Label check_for_nan;
-      __ cmp(edx, masm->isolate()->factory()->undefined_value());
+      __ cmp(edx, isolate()->factory()->undefined_value());
       __ j(not_equal, &check_for_nan, Label::kNear);
       __ Move(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc))));
       __ ret(0);
@@ -2010,7 +2004,7 @@ void ICCompareStub::GenerateGeneric(MacroAssembler* masm) {
     // Test for NaN. Compare heap numbers in a general way,
     // to handle NaNs correctly.
     __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
-           Immediate(masm->isolate()->factory()->heap_number_map()));
+           Immediate(isolate()->factory()->heap_number_map()));
     __ j(equal, &generic_heap_number_comparison, Label::kNear);
     if (cc != equal) {
       // Call runtime on identical JSObjects.  Otherwise return equal.
@@ -2055,7 +2049,7 @@ void ICCompareStub::GenerateGeneric(MacroAssembler* masm) {
 
     // Check if the non-smi operand is a heap number.
     __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
-           Immediate(masm->isolate()->factory()->heap_number_map()));
+           Immediate(isolate()->factory()->heap_number_map()));
     // If heap number, handle it in the slow case.
     __ j(equal, &slow, Label::kNear);
     // Return non-equal (ebx is not zero)
@@ -2284,8 +2278,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
     // If we didn't have a matching function, and we didn't find the megamorph
     // sentinel, then we have in the slot either some other function or an
     // AllocationSite. Do a map check on the object in ecx.
-    Handle<Map> allocation_site_map =
-        masm->isolate()->factory()->allocation_site_map();
+    Handle<Map> allocation_site_map = isolate->factory()->allocation_site_map();
     __ cmp(FieldOperand(ecx, 0), Immediate(allocation_site_map));
     __ j(not_equal, &miss);
 
@@ -2332,7 +2325,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
       __ push(edx);
       __ push(ebx);
 
-      CreateAllocationSiteStub create_stub;
+      CreateAllocationSiteStub create_stub(isolate);
       __ CallStub(&create_stub);
 
       __ pop(ebx);
@@ -2368,7 +2361,6 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
   // edx : (only if ebx is not the megamorphic symbol) slot in feedback
   //       vector (Smi)
   // edi : the function to call
-  Isolate* isolate = masm->isolate();
   Label slow, non_function, wrap, cont;
 
   if (NeedsChecks()) {
@@ -2384,7 +2376,7 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
       // Type information was updated. Because we may call Array, which
       // expects either undefined or an AllocationSite in ebx we need
       // to set ebx to undefined.
-      __ mov(ebx, Immediate(isolate->factory()->undefined_value()));
+      __ mov(ebx, Immediate(isolate()->factory()->undefined_value()));
     }
   }
 
@@ -2431,7 +2423,7 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
       // object (megamorphic symbol) so no write barrier is needed.
       __ mov(FieldOperand(ebx, edx, times_half_pointer_size,
                           FixedArray::kHeaderSize),
-             Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate)));
+             Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate())));
     }
     // Check for function proxy.
     __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE);
@@ -2443,7 +2435,8 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
     __ Move(ebx, Immediate(0));
     __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY);
     {
-      Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline();
+      Handle<Code> adaptor =
+          isolate()->builtins()->ArgumentsAdaptorTrampoline();
       __ jmp(adaptor, RelocInfo::CODE_TARGET);
     }
 
@@ -2454,7 +2447,7 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
     __ Move(eax, Immediate(argc_));
     __ Move(ebx, Immediate(0));
     __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
-    Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline();
+    Handle<Code> adaptor = isolate()->builtins()->ArgumentsAdaptorTrampoline();
     __ jmp(adaptor, RelocInfo::CODE_TARGET);
   }
 
@@ -2502,10 +2495,10 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
       __ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size,
                                FixedArray::kHeaderSize));
       Handle<Map> allocation_site_map =
-          masm->isolate()->factory()->allocation_site_map();
+          isolate()->factory()->allocation_site_map();
       __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map));
       __ j(equal, &feedback_register_initialized);
-      __ mov(ebx, masm->isolate()->factory()->undefined_value());
+      __ mov(ebx, isolate()->factory()->undefined_value());
       __ bind(&feedback_register_initialized);
     }
 
@@ -2536,7 +2529,7 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
   // Set expected number of arguments to zero (not changing eax).
   __ Move(ebx, Immediate(0));
   Handle<Code> arguments_adaptor =
-      masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
+      isolate()->builtins()->ArgumentsAdaptorTrampoline();
   __ jmp(arguments_adaptor, RelocInfo::CODE_TARGET);
 }
 
@@ -2566,7 +2559,7 @@ void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
 
 void CodeStub::GenerateFPStubs(Isolate* isolate) {
   if (CpuFeatures::IsSupported(SSE2)) {
-    CEntryStub save_doubles(1, kSaveFPRegs);
+    CEntryStub save_doubles(isolate, 1, kSaveFPRegs);
     // Stubs might already be in the snapshot, detect that and don't regenerate,
     // which would lead to code stub initialization state being messed up.
     Code* save_doubles_code;
@@ -2579,7 +2572,7 @@ void CodeStub::GenerateFPStubs(Isolate* isolate) {
 
 
 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
-  CEntryStub stub(1, kDontSaveFPRegs);
+  CEntryStub stub(isolate, 1, kDontSaveFPRegs);
   stub.GetCode(isolate);
 }
 
@@ -2605,8 +2598,6 @@ void CEntryStub::Generate(MacroAssembler* masm) {
 
   // Result returned in eax, or eax+edx if result_size_ is 2.
 
-  Isolate* isolate = masm->isolate();
-
   // Check stack alignment.
   if (FLAG_debug_code) {
     __ CheckStackAlignment();
@@ -2616,7 +2607,7 @@ void CEntryStub::Generate(MacroAssembler* masm) {
   __ mov(Operand(esp, 0 * kPointerSize), edi);  // argc.
   __ mov(Operand(esp, 1 * kPointerSize), esi);  // argv.
   __ mov(Operand(esp, 2 * kPointerSize),
-         Immediate(ExternalReference::isolate_address(isolate)));
+         Immediate(ExternalReference::isolate_address(isolate())));
   __ call(ebx);
   // Result is in eax or edx:eax - do not destroy these registers!
 
@@ -2624,7 +2615,7 @@ void CEntryStub::Generate(MacroAssembler* masm) {
   // lead to crashes in the IC code later.
   if (FLAG_debug_code) {
     Label okay;
-    __ cmp(eax, isolate->factory()->the_hole_value());
+    __ cmp(eax, isolate()->factory()->the_hole_value());
     __ j(not_equal, &okay, Label::kNear);
     __ int3();
     __ bind(&okay);
@@ -2632,17 +2623,17 @@ void CEntryStub::Generate(MacroAssembler* masm) {
 
   // Check result for exception sentinel.
   Label exception_returned;
-  __ cmp(eax, isolate->factory()->exception());
+  __ cmp(eax, isolate()->factory()->exception());
   __ j(equal, &exception_returned);
 
   ExternalReference pending_exception_address(
-      Isolate::kPendingExceptionAddress, isolate);
+      Isolate::kPendingExceptionAddress, isolate());
 
   // Check that there is no pending exception, otherwise we
   // should have returned the exception sentinel.
   if (FLAG_debug_code) {
     __ push(edx);
-    __ mov(edx, Immediate(isolate->factory()->the_hole_value()));
+    __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
     Label okay;
     __ cmp(edx, Operand::StaticVariable(pending_exception_address));
     // Cannot use check here as it attempts to generate call into runtime.
@@ -2663,13 +2654,13 @@ void CEntryStub::Generate(MacroAssembler* masm) {
   __ mov(eax, Operand::StaticVariable(pending_exception_address));
 
   // Clear the pending exception.
-  __ mov(edx, Immediate(isolate->factory()->the_hole_value()));
+  __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
   __ mov(Operand::StaticVariable(pending_exception_address), edx);
 
   // Special handling of termination exceptions which are uncatchable
   // by javascript code.
   Label throw_termination_exception;
-  __ cmp(eax, isolate->factory()->termination_exception());
+  __ cmp(eax, isolate()->factory()->termination_exception());
   __ j(equal, &throw_termination_exception);
 
   // Handle normal exception.
@@ -2700,12 +2691,11 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
   __ push(ebx);
 
   // Save copies of the top frame descriptor on the stack.
-  ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, masm->isolate());
+  ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate());
   __ push(Operand::StaticVariable(c_entry_fp));
 
   // If this is the outermost JS call, set js_entry_sp value.
-  ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress,
-                                masm->isolate());
+  ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate());
   __ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0));
   __ j(not_equal, &not_outermost_js, Label::kNear);
   __ mov(Operand::StaticVariable(js_entry_sp), ebp);
@@ -2722,9 +2712,9 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
   // Caught exception: Store result (exception) in the pending exception
   // field in the JSEnv and return a failure sentinel.
   ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
-                                      masm->isolate());
+                                      isolate());
   __ mov(Operand::StaticVariable(pending_exception), eax);
-  __ mov(eax, Immediate(masm->isolate()->factory()->exception()));
+  __ mov(eax, Immediate(isolate()->factory()->exception()));
   __ jmp(&exit);
 
   // Invoke: Link this frame into the handler chain.  There's only one
@@ -2733,7 +2723,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
   __ PushTryHandler(StackHandler::JS_ENTRY, 0);
 
   // Clear any pending exceptions.
-  __ mov(edx, Immediate(masm->isolate()->factory()->the_hole_value()));
+  __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
   __ mov(Operand::StaticVariable(pending_exception), edx);
 
   // Fake a receiver (NULL).
@@ -2745,11 +2735,10 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
   // builtin stubs may not have been generated yet.
   if (is_construct) {
     ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
-                                      masm->isolate());
+                                      isolate());
     __ mov(edx, Immediate(construct_entry));
   } else {
-    ExternalReference entry(Builtins::kJSEntryTrampoline,
-                            masm->isolate());
+    ExternalReference entry(Builtins::kJSEntryTrampoline, isolate());
     __ mov(edx, Immediate(entry));
   }
   __ mov(edx, Operand(edx, 0));  // deref address
@@ -2769,8 +2758,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
 
   // Restore the top frame descriptor from the stack.
   __ pop(Operand::StaticVariable(ExternalReference(
-      Isolate::kCEntryFPAddress,
-      masm->isolate())));
+      Isolate::kCEntryFPAddress, isolate())));
 
   // Restore callee-saved registers (C calling conventions).
   __ pop(ebx);
@@ -2883,7 +2871,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
   __ bind(&loop);
   __ cmp(scratch, prototype);
   __ j(equal, &is_instance, Label::kNear);
-  Factory* factory = masm->isolate()->factory();
+  Factory* factory = isolate()->factory();
   __ cmp(scratch, Immediate(factory->null_value()));
   __ j(equal, &is_not_instance, Label::kNear);
   __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
@@ -3295,7 +3283,7 @@ void SubStringStub::Generate(MacroAssembler* masm) {
   // Longer than original string's length or negative: unsafe arguments.
   __ j(above, &runtime);
   // Return original string.
-  Counters* counters = masm->isolate()->counters();
+  Counters* counters = isolate()->counters();
   __ IncrementCounter(counters->sub_string_native(), 1);
   __ ret(3 * kPointerSize);
   __ bind(&not_original_string);
@@ -3317,7 +3305,7 @@ void SubStringStub::Generate(MacroAssembler* masm) {
   __ test(ebx, Immediate(kIsIndirectStringMask));
   __ j(zero, &seq_or_external_string, Label::kNear);
 
-  Factory* factory = masm->isolate()->factory();
+  Factory* factory = isolate()->factory();
   __ test(ebx, Immediate(kSlicedNotConsMask));
   __ j(not_zero, &sliced_string, Label::kNear);
   // Cons string.  Check whether it is flat, then fetch first part.
@@ -3637,7 +3625,7 @@ void StringCompareStub::Generate(MacroAssembler* masm) {
   STATIC_ASSERT(EQUAL == 0);
   STATIC_ASSERT(kSmiTag == 0);
   __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
-  __ IncrementCounter(masm->isolate()->counters()->string_compare_native(), 1);
+  __ IncrementCounter(isolate()->counters()->string_compare_native(), 1);
   __ ret(2 * kPointerSize);
 
   __ bind(&not_same);
@@ -3665,25 +3653,24 @@ void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
   //  -- eax    : right
   //  -- esp[0] : return address
   // -----------------------------------
-  Isolate* isolate = masm->isolate();
 
   // Load ecx with the allocation site.  We stick an undefined dummy value here
   // and replace it with the real allocation site later when we instantiate this
   // stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
-  __ mov(ecx, handle(isolate->heap()->undefined_value()));
+  __ mov(ecx, handle(isolate()->heap()->undefined_value()));
 
   // Make sure that we actually patched the allocation site.
   if (FLAG_debug_code) {
     __ test(ecx, Immediate(kSmiTagMask));
     __ Assert(not_equal, kExpectedAllocationSite);
     __ cmp(FieldOperand(ecx, HeapObject::kMapOffset),
-           isolate->factory()->allocation_site_map());
+           isolate()->factory()->allocation_site_map());
     __ Assert(equal, kExpectedAllocationSite);
   }
 
   // Tail call into the stub that handles binary operations with allocation
   // sites.
-  BinaryOpWithAllocationSiteStub stub(state_);
+  BinaryOpWithAllocationSiteStub stub(isolate(), state_);
   __ TailCallStub(&stub);
 }
 
@@ -3738,7 +3725,7 @@ void ICCompareStub::GenerateNumbers(MacroAssembler* masm) {
     Label done, left, left_smi, right_smi;
     __ JumpIfSmi(eax, &right_smi, Label::kNear);
     __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
-           masm->isolate()->factory()->heap_number_map());
+           isolate()->factory()->heap_number_map());
     __ j(not_equal, &maybe_undefined1, Label::kNear);
     __ movsd(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
     __ jmp(&left, Label::kNear);
@@ -3750,7 +3737,7 @@ void ICCompareStub::GenerateNumbers(MacroAssembler* masm) {
     __ bind(&left);
     __ JumpIfSmi(edx, &left_smi, Label::kNear);
     __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
-           masm->isolate()->factory()->heap_number_map());
+           isolate()->factory()->heap_number_map());
     __ j(not_equal, &maybe_undefined2, Label::kNear);
     __ movsd(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
     __ jmp(&done);
@@ -3780,22 +3767,22 @@ void ICCompareStub::GenerateNumbers(MacroAssembler* masm) {
     __ JumpIfSmi(ecx, &generic_stub, Label::kNear);
 
     __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
-           masm->isolate()->factory()->heap_number_map());
+           isolate()->factory()->heap_number_map());
     __ j(not_equal, &maybe_undefined1, Label::kNear);
     __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
-           masm->isolate()->factory()->heap_number_map());
+           isolate()->factory()->heap_number_map());
     __ j(not_equal, &maybe_undefined2, Label::kNear);
   }
 
   __ bind(&unordered);
   __ bind(&generic_stub);
-  ICCompareStub stub(op_, CompareIC::GENERIC, CompareIC::GENERIC,
+  ICCompareStub stub(isolate(), op_, CompareIC::GENERIC, CompareIC::GENERIC,
                      CompareIC::GENERIC);
-  __ jmp(stub.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
+  __ jmp(stub.GetCode(isolate()), RelocInfo::CODE_TARGET);
 
   __ bind(&maybe_undefined1);
   if (Token::IsOrderedRelationalCompareOp(op_)) {
-    __ cmp(eax, Immediate(masm->isolate()->factory()->undefined_value()));
+    __ cmp(eax, Immediate(isolate()->factory()->undefined_value()));
     __ j(not_equal, &miss);
     __ JumpIfSmi(edx, &unordered);
     __ CmpObjectType(edx, HEAP_NUMBER_TYPE, ecx);
@@ -3805,7 +3792,7 @@ void ICCompareStub::GenerateNumbers(MacroAssembler* masm) {
 
   __ bind(&maybe_undefined2);
   if (Token::IsOrderedRelationalCompareOp(op_)) {
-    __ cmp(edx, Immediate(masm->isolate()->factory()->undefined_value()));
+    __ cmp(edx, Immediate(isolate()->factory()->undefined_value()));
     __ j(equal, &unordered);
   }
 
@@ -4040,7 +4027,7 @@ void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
   {
     // Call the runtime system in a fresh internal frame.
     ExternalReference miss = ExternalReference(IC_Utility(IC::kCompareIC_Miss),
-                                               masm->isolate());
+                                               isolate());
     FrameScope scope(masm, StackFrame::INTERNAL);
     __ push(edx);  // Preserve edx and eax.
     __ push(eax);
@@ -4114,7 +4101,8 @@ void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
     __ bind(&good);
   }
 
-  NameDictionaryLookupStub stub(properties, r0, r0, NEGATIVE_LOOKUP);
+  NameDictionaryLookupStub stub(masm->isolate(), properties, r0, r0,
+                                NEGATIVE_LOOKUP);
   __ push(Immediate(Handle<Object>(name)));
   __ push(Immediate(name->Hash()));
   __ CallStub(&stub);
@@ -4170,7 +4158,8 @@ void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
     __ j(equal, done);
   }
 
-  NameDictionaryLookupStub stub(elements, r1, r0, POSITIVE_LOOKUP);
+  NameDictionaryLookupStub stub(masm->isolate(), elements, r1, r0,
+                                POSITIVE_LOOKUP);
   __ push(name);
   __ mov(r0, FieldOperand(name, Name::kHashFieldOffset));
   __ shr(r0, Name::kHashShift);
@@ -4230,7 +4219,7 @@ void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
                             index_,
                             times_pointer_size,
                             kElementsStartOffset - kHeapObjectTag));
-    __ cmp(scratch, masm->isolate()->factory()->undefined_value());
+    __ cmp(scratch, isolate()->factory()->undefined_value());
     __ j(equal, &not_in_dictionary);
 
     // Stop if found the property.
@@ -4273,10 +4262,10 @@ void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
 
 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
     Isolate* isolate) {
-  StoreBufferOverflowStub stub(kDontSaveFPRegs);
+  StoreBufferOverflowStub stub(isolate, kDontSaveFPRegs);
   stub.GetCode(isolate);
   if (CpuFeatures::IsSafeForSnapshot(SSE2)) {
-    StoreBufferOverflowStub stub2(kSaveFPRegs);
+    StoreBufferOverflowStub stub2(isolate, kSaveFPRegs);
     stub2.GetCode(isolate);
   }
 }
@@ -4376,12 +4365,11 @@ void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
   __ mov(Operand(esp, 0 * kPointerSize), regs_.object());
   __ mov(Operand(esp, 1 * kPointerSize), regs_.address());  // Slot.
   __ mov(Operand(esp, 2 * kPointerSize),
-         Immediate(ExternalReference::isolate_address(masm->isolate())));
+         Immediate(ExternalReference::isolate_address(isolate())));
 
   AllowExternalCallThatCantCauseGC scope(masm);
   __ CallCFunction(
-      ExternalReference::incremental_marking_record_write_function(
-          masm->isolate()),
+      ExternalReference::incremental_marking_record_write_function(isolate()),
       argument_count);
 
   regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_);
@@ -4568,8 +4556,8 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
 
 
 void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
-  CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
-  __ call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
+  CEntryStub ces(isolate(), 1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
+  __ call(ces.GetCode(isolate()), RelocInfo::CODE_TARGET);
   int parameter_count_offset =
       StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
   __ mov(ebx, MemOperand(ebp, parameter_count_offset));
@@ -4585,7 +4573,7 @@ void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
 
 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
   if (masm->isolate()->function_entry_hook() != NULL) {
-    ProfileEntryHookStub stub;
+    ProfileEntryHookStub stub(masm->isolate());
     masm->CallStub(&stub);
   }
 }
@@ -4609,8 +4597,8 @@ void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
   __ push(eax);
 
   // Call the entry hook.
-  ASSERT(masm->isolate()->function_entry_hook() != NULL);
-  __ call(FUNCTION_ADDR(masm->isolate()->function_entry_hook()),
+  ASSERT(isolate()->function_entry_hook() != NULL);
+  __ call(FUNCTION_ADDR(isolate()->function_entry_hook()),
           RelocInfo::RUNTIME_ENTRY);
   __ add(esp, Immediate(2 * kPointerSize));
 
@@ -4627,7 +4615,8 @@ template<class T>
 static void CreateArrayDispatch(MacroAssembler* masm,
                                 AllocationSiteOverrideMode mode) {
   if (mode == DISABLE_ALLOCATION_SITES) {
-    T stub(GetInitialFastElementsKind(),
+    T stub(masm->isolate(),
+           GetInitialFastElementsKind(),
            mode);
     __ TailCallStub(&stub);
   } else if (mode == DONT_OVERRIDE) {
@@ -4638,7 +4627,7 @@ static void CreateArrayDispatch(MacroAssembler* masm,
       ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
       __ cmp(edx, kind);
       __ j(not_equal, &next);
-      T stub(kind);
+      T stub(masm->isolate(), kind);
       __ TailCallStub(&stub);
       __ bind(&next);
     }
@@ -4682,12 +4671,14 @@ static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
     ElementsKind initial = GetInitialFastElementsKind();
     ElementsKind holey_initial = GetHoleyElementsKind(initial);
 
-    ArraySingleArgumentConstructorStub stub_holey(holey_initial,
+    ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
+                                                  holey_initial,
                                                   DISABLE_ALLOCATION_SITES);
     __ TailCallStub(&stub_holey);
 
     __ bind(&normal_sequence);
-    ArraySingleArgumentConstructorStub stub(initial,
+    ArraySingleArgumentConstructorStub stub(masm->isolate(),
+                                            initial,
                                             DISABLE_ALLOCATION_SITES);
     __ TailCallStub(&stub);
   } else if (mode == DONT_OVERRIDE) {
@@ -4717,7 +4708,7 @@ static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
       ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
       __ cmp(edx, kind);
       __ j(not_equal, &next);
-      ArraySingleArgumentConstructorStub stub(kind);
+      ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
       __ TailCallStub(&stub);
       __ bind(&next);
     }
@@ -4736,10 +4727,10 @@ static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
       TERMINAL_FAST_ELEMENTS_KIND);
   for (int i = 0; i <= to_index; ++i) {
     ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
-    T stub(kind);
+    T stub(isolate, kind);
     stub.GetCode(isolate);
     if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) {
-      T stub1(kind, DISABLE_ALLOCATION_SITES);
+      T stub1(isolate, kind, DISABLE_ALLOCATION_SITES);
       stub1.GetCode(isolate);
     }
   }
@@ -4761,11 +4752,11 @@ void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime(
   ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
   for (int i = 0; i < 2; i++) {
     // For internal arrays we only need a few things
-    InternalArrayNoArgumentConstructorStub stubh1(kinds[i]);
+    InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]);
     stubh1.GetCode(isolate);
-    InternalArraySingleArgumentConstructorStub stubh2(kinds[i]);
+    InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
     stubh2.GetCode(isolate);
-    InternalArrayNArgumentsConstructorStub stubh3(kinds[i]);
+    InternalArrayNArgumentsConstructorStub stubh3(isolate, kinds[i]);
     stubh3.GetCode(isolate);
   }
 }
@@ -4826,7 +4817,7 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) {
   Label no_info;
   // If the feedback vector is the undefined value call an array constructor
   // that doesn't use AllocationSites.
-  __ cmp(ebx, masm->isolate()->factory()->undefined_value());
+  __ cmp(ebx, isolate()->factory()->undefined_value());
   __ j(equal, &no_info);
 
   // Only look at the lower 16 bits of the transition info.
@@ -4848,7 +4839,7 @@ void InternalArrayConstructorStub::GenerateCase(
 
   __ test(eax, eax);
   __ j(not_zero, &not_zero_case);
-  InternalArrayNoArgumentConstructorStub stub0(kind);
+  InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
   __ TailCallStub(&stub0);
 
   __ bind(&not_zero_case);
@@ -4863,16 +4854,16 @@ void InternalArrayConstructorStub::GenerateCase(
     __ j(zero, &normal_sequence);
 
     InternalArraySingleArgumentConstructorStub
-        stub1_holey(GetHoleyElementsKind(kind));
+        stub1_holey(isolate(), GetHoleyElementsKind(kind));
     __ TailCallStub(&stub1_holey);
   }
 
   __ bind(&normal_sequence);
-  InternalArraySingleArgumentConstructorStub stub1(kind);
+  InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
   __ TailCallStub(&stub1);
 
   __ bind(&not_one_case);
-  InternalArrayNArgumentsConstructorStub stubN(kind);
+  InternalArrayNArgumentsConstructorStub stubN(isolate(), kind);
   __ TailCallStub(&stubN);
 }
 
@@ -4965,8 +4956,6 @@ void CallApiFunctionStub::Generate(MacroAssembler* masm) {
   STATIC_ASSERT(FCA::kHolderIndex == 0);
   STATIC_ASSERT(FCA::kArgsLength == 7);
 
-  Isolate* isolate = masm->isolate();
-
   __ pop(return_address);
 
   // context save
@@ -4983,9 +4972,9 @@ void CallApiFunctionStub::Generate(MacroAssembler* masm) {
   Register scratch = call_data;
   if (!call_data_undefined) {
     // return value
-    __ push(Immediate(isolate->factory()->undefined_value()));
+    __ push(Immediate(isolate()->factory()->undefined_value()));
     // return value default
-    __ push(Immediate(isolate->factory()->undefined_value()));
+    __ push(Immediate(isolate()->factory()->undefined_value()));
   } else {
     // return value
     __ push(scratch);
@@ -4993,7 +4982,7 @@ void CallApiFunctionStub::Generate(MacroAssembler* masm) {
     __ push(scratch);
   }
   // isolate
-  __ push(Immediate(reinterpret_cast<int>(isolate)));
+  __ push(Immediate(reinterpret_cast<int>(isolate())));
   // holder
   __ push(holder);
 
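The hunks in this file, and in the files that follow, all apply the same edit: a stub is now constructed with an explicit Isolate*, hands it to its PlatformCodeStub (or HydrogenCodeStub) base via the initializer list, and code inside the stub calls an isolate() accessor instead of masm->isolate(). The base-class side of that plumbing is not visible in these hunks (the actual base classes are declared in src/code-stubs.h), so the sketch below only illustrates the shape of the change, using simplified stand-in types rather than the real V8 declarations:

    // Sketch only -- simplified stand-ins, not the actual V8 classes. It
    // mirrors the pattern repeated in the hunks: construct a stub with an
    // Isolate*, store it in the base class, read it back through isolate().
    class Isolate {};  // opaque stand-in for v8::internal::Isolate

    class CodeStubSketch {
     public:
      explicit CodeStubSketch(Isolate* isolate) : isolate_(isolate) {}
      Isolate* isolate() const { return isolate_; }
     private:
      Isolate* isolate_;
    };

    class PlatformCodeStubSketch : public CodeStubSketch {
     public:
      explicit PlatformCodeStubSketch(Isolate* isolate)
          : CodeStubSketch(isolate) {}
    };

    enum SaveFPRegsMode { kDontSaveFPRegs, kSaveFPRegs };

    // A concrete stub threads the isolate through its initializer list, as
    // StoreBufferOverflowStub does in the header hunk below.
    class StoreBufferOverflowStubSketch : public PlatformCodeStubSketch {
     public:
      StoreBufferOverflowStubSketch(Isolate* isolate, SaveFPRegsMode save_fp)
          : PlatformCodeStubSketch(isolate), save_doubles_(save_fp) {}

      void Generate() {
        Isolate* current = isolate();  // no MacroAssembler needed any more
        (void)current;
      }

     private:
      SaveFPRegsMode save_doubles_;
    };

Call sites change accordingly, from StoreBufferOverflowStub stub(kDontSaveFPRegs) to StoreBufferOverflowStub stub(isolate, kDontSaveFPRegs), which is the edit repeated throughout these files.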
index cf20a11c6dc38b7f02a04132ee51664405e3a8dc..e4eef89667d2ea98282454fac6a31882b75d2c2d 100644 (file)
--- a/src/ia32/code-stubs-ia32.h
+++ b/src/ia32/code-stubs-ia32.h
@@ -42,8 +42,8 @@ void ArrayNativeCode(MacroAssembler* masm,
 
 class StoreBufferOverflowStub: public PlatformCodeStub {
  public:
-  explicit StoreBufferOverflowStub(SaveFPRegsMode save_fp)
-      : save_doubles_(save_fp) {
+  StoreBufferOverflowStub(Isolate* isolate, SaveFPRegsMode save_fp)
+      : PlatformCodeStub(isolate), save_doubles_(save_fp) {
     ASSERT(CpuFeatures::IsSafeForSnapshot(SSE2) || save_fp == kDontSaveFPRegs);
   }
 
@@ -92,7 +92,7 @@ class StringHelper : public AllStatic {
 
 class SubStringStub: public PlatformCodeStub {
  public:
-  SubStringStub() {}
+  explicit SubStringStub(Isolate* isolate) : PlatformCodeStub(isolate) {}
 
  private:
   Major MajorKey() { return SubString; }
@@ -104,7 +104,7 @@ class SubStringStub: public PlatformCodeStub {
 
 class StringCompareStub: public PlatformCodeStub {
  public:
-  StringCompareStub() { }
+  explicit StringCompareStub(Isolate* isolate) : PlatformCodeStub(isolate) { }
 
   // Compares two flat ASCII strings and returns result in eax.
   static void GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
@@ -142,11 +142,13 @@ class NameDictionaryLookupStub: public PlatformCodeStub {
  public:
   enum LookupMode { POSITIVE_LOOKUP, NEGATIVE_LOOKUP };
 
-  NameDictionaryLookupStub(Register dictionary,
+  NameDictionaryLookupStub(Isolate* isolate,
+                           Register dictionary,
                            Register result,
                            Register index,
                            LookupMode mode)
-      : dictionary_(dictionary), result_(result), index_(index), mode_(mode) { }
+      : PlatformCodeStub(isolate),
+        dictionary_(dictionary), result_(result), index_(index), mode_(mode) { }
 
   void Generate(MacroAssembler* masm);
 
@@ -202,12 +204,14 @@ class NameDictionaryLookupStub: public PlatformCodeStub {
 
 class RecordWriteStub: public PlatformCodeStub {
  public:
-  RecordWriteStub(Register object,
+  RecordWriteStub(Isolate* isolate,
+                  Register object,
                   Register value,
                   Register address,
                   RememberedSetAction remembered_set_action,
                   SaveFPRegsMode fp_mode)
-      : object_(object),
+      : PlatformCodeStub(isolate),
+        object_(object),
         value_(value),
         address_(address),
         remembered_set_action_(remembered_set_action),
index 42284ec75c5546cb77a020f0452c1fa65c20e23e..456ee3efe6750fd09a6d2891e033b1e57ae460ef 100644 (file)
--- a/src/ia32/debug-ia32.cc
+++ b/src/ia32/debug-ia32.cc
@@ -141,7 +141,7 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm,
     __ Move(eax, Immediate(0));  // No arguments.
     __ mov(ebx, Immediate(ExternalReference::debug_break(masm->isolate())));
 
-    CEntryStub ceb(1);
+    CEntryStub ceb(masm->isolate(), 1);
     __ CallStub(&ceb);
 
     // Automatically find register that could be used after register restore.
index fbd9a218e056ede549221471e567e5929c96a2dd..789b646f2f340d16b96358473684aa8601b0640a 100644 (file)
--- a/src/ia32/full-codegen-ia32.cc
+++ b/src/ia32/full-codegen-ia32.cc
@@ -229,7 +229,7 @@ void FullCodeGenerator::Generate() {
       __ Push(info->scope()->GetScopeInfo());
       __ CallRuntime(Runtime::kHiddenNewGlobalContext, 2);
     } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
-      FastNewContextStub stub(heap_slots);
+      FastNewContextStub stub(isolate(), heap_slots);
       __ CallStub(&stub);
     } else {
       __ push(edi);
@@ -291,7 +291,7 @@ void FullCodeGenerator::Generate() {
     } else {
       type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
     }
-    ArgumentsAccessStub stub(type);
+    ArgumentsAccessStub stub(isolate(), type);
     __ CallStub(&stub);
 
     SetVar(arguments, eax, ebx, edx);
@@ -1300,7 +1300,9 @@ void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
       !pretenure &&
       scope()->is_function_scope() &&
       info->num_literals() == 0) {
-    FastNewClosureStub stub(info->strict_mode(), info->is_generator());
+    FastNewClosureStub stub(isolate(),
+                            info->strict_mode(),
+                            info->is_generator());
     __ mov(ebx, Immediate(info));
     __ CallStub(&stub);
   } else {
@@ -1634,7 +1636,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
     __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
     __ mov(ecx, Immediate(constant_properties));
     __ mov(edx, Immediate(Smi::FromInt(flags)));
-    FastCloneShallowObjectStub stub(properties_count);
+    FastCloneShallowObjectStub stub(isolate(), properties_count);
     __ CallStub(&stub);
   }
 
@@ -1769,6 +1771,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
     __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
     __ mov(ecx, Immediate(constant_elements));
     FastCloneShallowArrayStub stub(
+        isolate(),
         FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
         allocation_site_mode,
         length);
@@ -1797,7 +1800,10 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
     __ mov(eax, FieldOperand(ebx, JSFunction::kLiteralsOffset));
     __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
     __ mov(ecx, Immediate(constant_elements));
-    FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
+    FastCloneShallowArrayStub stub(isolate(),
+                                   mode,
+                                   allocation_site_mode,
+                                   length);
     __ CallStub(&stub);
   }
 
@@ -1834,7 +1840,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
     } else {
       // Store the subexpression value in the array's elements.
       __ mov(ecx, Immediate(Smi::FromInt(i)));
-      StoreArrayLiteralElementStub stub;
+      StoreArrayLiteralElementStub stub(isolate());
       __ CallStub(&stub);
     }
 
@@ -2083,7 +2089,7 @@ void FullCodeGenerator::VisitYield(Yield* expr) {
       CallIC(ic, TypeFeedbackId::None());
       __ mov(edi, eax);
       __ mov(Operand(esp, 2 * kPointerSize), edi);
-      CallFunctionStub stub(1, CALL_AS_METHOD);
+      CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
       __ CallStub(&stub);
 
       __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
@@ -2290,7 +2296,7 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
 
   __ bind(&stub_call);
   __ mov(eax, ecx);
-  BinaryOpICStub stub(op, mode);
+  BinaryOpICStub stub(isolate(), op, mode);
   CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
   patch_site.EmitPatchInfo();
   __ jmp(&done, Label::kNear);
@@ -2373,7 +2379,7 @@ void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
                                      Token::Value op,
                                      OverwriteMode mode) {
   __ pop(edx);
-  BinaryOpICStub stub(op, mode);
+  BinaryOpICStub stub(isolate(), op, mode);
   JumpPatchSite patch_site(masm_);    // unbound, signals no inlined smi code.
   CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
   patch_site.EmitPatchInfo();
@@ -2622,7 +2628,7 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr) {
 
   // Record source position of the IC call.
   SetSourcePosition(expr->position());
-  CallFunctionStub stub(arg_count, flags);
+  CallFunctionStub stub(isolate(), arg_count, flags);
   __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
   __ CallStub(&stub);
   RecordJSReturnSite(expr);
@@ -2665,7 +2671,7 @@ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
 
   // Record source position of the IC call.
   SetSourcePosition(expr->position());
-  CallFunctionStub stub(arg_count, CALL_AS_METHOD);
+  CallFunctionStub stub(isolate(), arg_count, CALL_AS_METHOD);
   __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
   __ CallStub(&stub);
   RecordJSReturnSite(expr);
@@ -2696,7 +2702,7 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr) {
   __ mov(edx, Immediate(Smi::FromInt(expr->CallFeedbackSlot())));
 
   // Record call targets in unoptimized code.
-  CallFunctionStub stub(arg_count, RECORD_CALL_TARGET);
+  CallFunctionStub stub(isolate(), arg_count, RECORD_CALL_TARGET);
   __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
   __ CallStub(&stub);
 
@@ -2766,7 +2772,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
     }
     // Record source position for debugger.
     SetSourcePosition(expr->position());
-    CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS);
+    CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
     __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
     __ CallStub(&stub);
     RecordJSReturnSite(expr);
@@ -2882,7 +2888,7 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
   __ LoadHeapObject(ebx, FeedbackVector());
   __ mov(edx, Immediate(Smi::FromInt(expr->CallNewFeedbackSlot())));
 
-  CallConstructStub stub(RECORD_CALL_TARGET);
+  CallConstructStub stub(isolate(), RECORD_CALL_TARGET);
   __ call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL);
   PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
   context()->Plug(eax);
@@ -3259,7 +3265,7 @@ void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
   VisitForAccumulatorValue(args->at(0));
   __ mov(edx, eax);
   __ Move(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));
-  ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
+  ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
   __ CallStub(&stub);
   context()->Plug(eax);
 }
@@ -3372,7 +3378,7 @@ void FullCodeGenerator::EmitLog(CallRuntime* expr) {
 
 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
   // Load the arguments on the stack and call the stub.
-  SubStringStub stub;
+  SubStringStub stub(isolate());
   ZoneList<Expression*>* args = expr->arguments();
   ASSERT(args->length() == 3);
   VisitForStackValue(args->at(0));
@@ -3385,7 +3391,7 @@ void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
 
 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
   // Load the arguments on the stack and call the stub.
-  RegExpExecStub stub;
+  RegExpExecStub stub(isolate());
   ZoneList<Expression*>* args = expr->arguments();
   ASSERT(args->length() == 4);
   VisitForStackValue(args->at(0));
@@ -3538,7 +3544,7 @@ void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
   VisitForStackValue(args->at(1));
 
   if (CpuFeatures::IsSupported(SSE2)) {
-    MathPowStub stub(MathPowStub::ON_STACK);
+    MathPowStub stub(isolate(), MathPowStub::ON_STACK);
     __ CallStub(&stub);
   } else {
     __ CallRuntime(Runtime::kHiddenMathPowSlow, 2);
@@ -3583,7 +3589,7 @@ void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
   // Load the argument into eax and call the stub.
   VisitForAccumulatorValue(args->at(0));
 
-  NumberToStringStub stub;
+  NumberToStringStub stub(isolate());
   __ CallStub(&stub);
   context()->Plug(eax);
 }
@@ -3709,7 +3715,7 @@ void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
   VisitForAccumulatorValue(args->at(1));
 
   __ pop(edx);
-  StringAddStub stub(STRING_ADD_CHECK_BOTH, NOT_TENURED);
+  StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
   __ CallStub(&stub);
   context()->Plug(eax);
 }
@@ -3722,7 +3728,7 @@ void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
   VisitForStackValue(args->at(0));
   VisitForStackValue(args->at(1));
 
-  StringCompareStub stub;
+  StringCompareStub stub(isolate());
   __ CallStub(&stub);
   context()->Plug(eax);
 }
@@ -3762,7 +3768,7 @@ void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
 
 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
   // Load the arguments on the stack and call the stub.
-  RegExpConstructResultStub stub;
+  RegExpConstructResultStub stub(isolate());
   ZoneList<Expression*>* args = expr->arguments();
   ASSERT(args->length() == 3);
   VisitForStackValue(args->at(0));
@@ -4160,7 +4166,7 @@ void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
 
     // Record source position of the IC call.
     SetSourcePosition(expr->position());
-    CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS);
+    CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
     __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
     __ CallStub(&stub);
     // Restore context register.
@@ -4389,7 +4395,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
     __ jmp(&stub_call, Label::kNear);
     __ bind(&slow);
   }
-  ToNumberStub convert_stub;
+  ToNumberStub convert_stub(isolate());
   __ CallStub(&convert_stub);
 
   // Save result for postfix expressions.
@@ -4419,7 +4425,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
   __ bind(&stub_call);
   __ mov(edx, eax);
   __ mov(eax, Immediate(Smi::FromInt(1)));
-  BinaryOpICStub stub(expr->binary_op(), NO_OVERWRITE);
+  BinaryOpICStub stub(isolate(), expr->binary_op(), NO_OVERWRITE);
   CallIC(stub.GetCode(isolate()), expr->CountBinOpFeedbackId());
   patch_site.EmitPatchInfo();
   __ bind(&done);
@@ -4630,7 +4636,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
 
     case Token::INSTANCEOF: {
       VisitForStackValue(expr->right());
-      InstanceofStub stub(InstanceofStub::kNoFlags);
+      InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
       __ CallStub(&stub);
       PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
       __ test(eax, eax);
index a7d78d315994546d7a4aab8d29c9c6c8d5f25a40..5683b0dd472cdeaec715ead09cdb48e9ee650a49 100644 (file)
--- a/src/ia32/lithium-codegen-ia32.cc
+++ b/src/ia32/lithium-codegen-ia32.cc
@@ -293,7 +293,7 @@ bool LCodeGen::GeneratePrologue() {
     Comment(";;; Allocate local context");
     // Argument to NewContext is the function, which is still in edi.
     if (heap_slots <= FastNewContextStub::kMaximumSlots) {
-      FastNewContextStub stub(heap_slots);
+      FastNewContextStub stub(isolate(), heap_slots);
       __ CallStub(&stub);
     } else {
       __ push(edi);
@@ -1350,17 +1350,17 @@ void LCodeGen::DoCallStub(LCallStub* instr) {
   ASSERT(ToRegister(instr->result()).is(eax));
   switch (instr->hydrogen()->major_key()) {
     case CodeStub::RegExpExec: {
-      RegExpExecStub stub;
+      RegExpExecStub stub(isolate());
       CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
       break;
     }
     case CodeStub::SubString: {
-      SubStringStub stub;
+      SubStringStub stub(isolate());
       CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
       break;
     }
     case CodeStub::StringCompare: {
-      StringCompareStub stub;
+      StringCompareStub stub(isolate());
       CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
       break;
     }
@@ -2357,7 +2357,7 @@ void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
   ASSERT(ToRegister(instr->right()).is(eax));
   ASSERT(ToRegister(instr->result()).is(eax));
 
-  BinaryOpICStub stub(instr->op(), NO_OVERWRITE);
+  BinaryOpICStub stub(isolate(), instr->op(), NO_OVERWRITE);
   CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
 }
 
@@ -2975,7 +2975,7 @@ void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
 void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
   // Object and function are in fixed registers defined by the stub.
   ASSERT(ToRegister(instr->context()).is(esi));
-  InstanceofStub stub(InstanceofStub::kArgsInRegisters);
+  InstanceofStub stub(isolate(), InstanceofStub::kArgsInRegisters);
   CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
 
   Label true_value, done;
@@ -3064,7 +3064,7 @@ void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
       flags | InstanceofStub::kCallSiteInlineCheck);
   flags = static_cast<InstanceofStub::Flags>(
       flags | InstanceofStub::kReturnTrueFalseObject);
-  InstanceofStub stub(flags);
+  InstanceofStub stub(isolate(), flags);
 
   // Get the temp register reserved by the instruction. This needs to be a
   // register which is pushed last by PushSafepointRegisters as top of the
@@ -4167,7 +4167,7 @@ void LCodeGen::DoPower(LPower* instr) {
   ASSERT(ToDoubleRegister(instr->result()).is(xmm3));
 
   if (exponent_type.IsSmi()) {
-    MathPowStub stub(MathPowStub::TAGGED);
+    MathPowStub stub(isolate(), MathPowStub::TAGGED);
     __ CallStub(&stub);
   } else if (exponent_type.IsTagged()) {
     Label no_deopt;
@@ -4175,14 +4175,14 @@ void LCodeGen::DoPower(LPower* instr) {
     __ CmpObjectType(eax, HEAP_NUMBER_TYPE, ecx);
     DeoptimizeIf(not_equal, instr->environment());
     __ bind(&no_deopt);
-    MathPowStub stub(MathPowStub::TAGGED);
+    MathPowStub stub(isolate(), MathPowStub::TAGGED);
     __ CallStub(&stub);
   } else if (exponent_type.IsInteger32()) {
-    MathPowStub stub(MathPowStub::INTEGER);
+    MathPowStub stub(isolate(), MathPowStub::INTEGER);
     __ CallStub(&stub);
   } else {
     ASSERT(exponent_type.IsDouble());
-    MathPowStub stub(MathPowStub::DOUBLE);
+    MathPowStub stub(isolate(), MathPowStub::DOUBLE);
     __ CallStub(&stub);
   }
 }
@@ -4275,7 +4275,7 @@ void LCodeGen::DoCallFunction(LCallFunction* instr) {
   ASSERT(ToRegister(instr->result()).is(eax));
 
   int arity = instr->arity();
-  CallFunctionStub stub(arity, instr->hydrogen()->function_flags());
+  CallFunctionStub stub(isolate(), arity, instr->hydrogen()->function_flags());
   CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
 }
 
@@ -4287,7 +4287,7 @@ void LCodeGen::DoCallNew(LCallNew* instr) {
 
   // No cell in ebx for construct type feedback in optimized code
   __ mov(ebx, isolate()->factory()->undefined_value());
-  CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
+  CallConstructStub stub(isolate(), NO_CALL_FUNCTION_FLAGS);
   __ Move(eax, Immediate(instr->arity()));
   CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
 }
@@ -4307,7 +4307,7 @@ void LCodeGen::DoCallNewArray(LCallNewArray* instr) {
           : DONT_OVERRIDE;
 
   if (instr->arity() == 0) {
-    ArrayNoArgumentConstructorStub stub(kind, override_mode);
+    ArrayNoArgumentConstructorStub stub(isolate(), kind, override_mode);
     CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
   } else if (instr->arity() == 1) {
     Label done;
@@ -4320,17 +4320,19 @@ void LCodeGen::DoCallNewArray(LCallNewArray* instr) {
       __ j(zero, &packed_case, Label::kNear);
 
       ElementsKind holey_kind = GetHoleyElementsKind(kind);
-      ArraySingleArgumentConstructorStub stub(holey_kind, override_mode);
+      ArraySingleArgumentConstructorStub stub(isolate(),
+                                              holey_kind,
+                                              override_mode);
       CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
       __ jmp(&done, Label::kNear);
       __ bind(&packed_case);
     }
 
-    ArraySingleArgumentConstructorStub stub(kind, override_mode);
+    ArraySingleArgumentConstructorStub stub(isolate(), kind, override_mode);
     CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
     __ bind(&done);
   } else {
-    ArrayNArgumentsConstructorStub stub(kind, override_mode);
+    ArrayNArgumentsConstructorStub stub(isolate(), kind, override_mode);
     CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
   }
 }
@@ -4783,7 +4785,7 @@ void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
     PushSafepointRegistersScope scope(this);
     __ mov(ebx, to_map);
     bool is_js_array = from_map->instance_type() == JS_ARRAY_TYPE;
-    TransitionElementsKindStub stub(from_kind, to_kind, is_js_array);
+    TransitionElementsKindStub stub(isolate(), from_kind, to_kind, is_js_array);
     __ CallStub(&stub);
     RecordSafepointWithLazyDeopt(instr,
         RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
@@ -4907,7 +4909,8 @@ void LCodeGen::DoStringAdd(LStringAdd* instr) {
   ASSERT(ToRegister(instr->context()).is(esi));
   ASSERT(ToRegister(instr->left()).is(edx));
   ASSERT(ToRegister(instr->right()).is(eax));
-  StringAddStub stub(instr->hydrogen()->flags(),
+  StringAddStub stub(isolate(),
+                     instr->hydrogen()->flags(),
                      instr->hydrogen()->pretenure_flag());
   CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
 }
@@ -6043,7 +6046,8 @@ void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
   // space for nested functions that don't need literals cloning.
   bool pretenure = instr->hydrogen()->pretenure();
   if (!pretenure && instr->hydrogen()->has_no_literals()) {
-    FastNewClosureStub stub(instr->hydrogen()->strict_mode(),
+    FastNewClosureStub stub(isolate(),
+                            instr->hydrogen()->strict_mode(),
                             instr->hydrogen()->is_generator());
     __ mov(ebx, Immediate(instr->hydrogen()->shared_info()));
     CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
index 7847b3b398d5a749619c684c0fea00494f5ee362..3b2e039c4b05b8f361a8ddf1abb42e50c3e8c16a 100644 (file)
--- a/src/ia32/macro-assembler-ia32.cc
+++ b/src/ia32/macro-assembler-ia32.cc
@@ -198,7 +198,7 @@ void MacroAssembler::RememberedSetHelper(
     j(equal, &done, Label::kNear);
   }
   StoreBufferOverflowStub store_buffer_overflow =
-      StoreBufferOverflowStub(save_fp);
+      StoreBufferOverflowStub(isolate(), save_fp);
   CallStub(&store_buffer_overflow);
   if (and_then == kReturnAtEnd) {
     ret(0);
@@ -247,7 +247,7 @@ void MacroAssembler::ClampUint8(Register reg) {
 void MacroAssembler::SlowTruncateToI(Register result_reg,
                                      Register input_reg,
                                      int offset) {
-  DoubleToIStub stub(input_reg, result_reg, offset, true);
+  DoubleToIStub stub(isolate(), input_reg, result_reg, offset, true);
   call(stub.GetCode(isolate()), RelocInfo::CODE_TARGET);
 }
 
@@ -634,7 +634,8 @@ void MacroAssembler::RecordWriteForMap(
   // them.
   lea(address, FieldOperand(object, HeapObject::kMapOffset));
   mov(value, Immediate(map));
-  RecordWriteStub stub(object, value, address, OMIT_REMEMBERED_SET, save_fp);
+  RecordWriteStub stub(isolate(), object, value, address, OMIT_REMEMBERED_SET,
+                       save_fp);
   CallStub(&stub);
 
   bind(&done);
@@ -699,7 +700,8 @@ void MacroAssembler::RecordWrite(Register object,
                 &done,
                 Label::kNear);
 
-  RecordWriteStub stub(object, value, address, remembered_set_action, fp_mode);
+  RecordWriteStub stub(isolate(), object, value, address, remembered_set_action,
+                       fp_mode);
   CallStub(&stub);
 
   bind(&done);
@@ -717,7 +719,7 @@ void MacroAssembler::RecordWrite(Register object,
 void MacroAssembler::DebugBreak() {
   Move(eax, Immediate(0));
   mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak, isolate())));
-  CEntryStub ces(1);
+  CEntryStub ces(isolate(), 1);
   call(ces.GetCode(isolate()), RelocInfo::DEBUG_BREAK);
 }
 #endif
@@ -2246,8 +2248,10 @@ void MacroAssembler::CallRuntime(const Runtime::Function* f,
   // smarter.
   Move(eax, Immediate(num_arguments));
   mov(ebx, Immediate(ExternalReference(f, isolate())));
-  CEntryStub ces(1, CpuFeatures::IsSupported(SSE2) ? save_doubles
-                                                   : kDontSaveFPRegs);
+  CEntryStub ces(isolate(),
+                 1,
+                 CpuFeatures::IsSupported(SSE2) ? save_doubles
+                                                : kDontSaveFPRegs);
   CallStub(&ces);
 }
 
@@ -2257,7 +2261,7 @@ void MacroAssembler::CallExternalReference(ExternalReference ref,
   mov(eax, Immediate(num_arguments));
   mov(ebx, Immediate(ref));
 
-  CEntryStub stub(1);
+  CEntryStub stub(isolate(), 1);
   CallStub(&stub);
 }
 
@@ -2451,7 +2455,7 @@ void MacroAssembler::CallApiFunctionAndReturn(
 void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
   // Set the entry point and jump to the C entry runtime stub.
   mov(ebx, Immediate(ext));
-  CEntryStub ces(1);
+  CEntryStub ces(isolate(), 1);
   jmp(ces.GetCode(isolate()), RelocInfo::CODE_TARGET);
 }
 
index b3bc71cbe71b72900740a7a15ff016b7f8607f8d..768ce5e2690bbf50515cfa2d25908b058c433227 100644 (file)
--- a/src/ia32/stub-cache-ia32.cc
+++ b/src/ia32/stub-cache-ia32.cc
@@ -446,7 +446,7 @@ void StubCompiler::GenerateFastApiCall(MacroAssembler* masm,
   __ mov(api_function_address, Immediate(function_address));
 
   // Jump to stub.
-  CallApiFunctionStub stub(is_store, call_data_undefined, argc);
+  CallApiFunctionStub stub(isolate, is_store, call_data_undefined, argc);
   __ TailCallStub(&stub);
 }
 
@@ -1033,12 +1033,14 @@ void LoadStubCompiler::GenerateLoadField(Register reg,
                                          Representation representation) {
   if (!reg.is(receiver())) __ mov(receiver(), reg);
   if (kind() == Code::LOAD_IC) {
-    LoadFieldStub stub(field.is_inobject(holder),
+    LoadFieldStub stub(isolate(),
+                       field.is_inobject(holder),
                        field.translate(holder),
                        representation);
     GenerateTailCall(masm(), stub.GetCode(isolate()));
   } else {
-    KeyedLoadFieldStub stub(field.is_inobject(holder),
+    KeyedLoadFieldStub stub(isolate(),
+                            field.is_inobject(holder),
                             field.translate(holder),
                             representation);
     GenerateTailCall(masm(), stub.GetCode(isolate()));
@@ -1087,7 +1089,7 @@ void LoadStubCompiler::GenerateLoadCallback(
   Address function_address = v8::ToCData<Address>(callback->getter());
   __ mov(getter_address, Immediate(function_address));
 
-  CallApiGetterStub stub;
+  CallApiGetterStub stub(isolate());
   __ TailCallStub(&stub);
 }
 
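One detail worth noting before the platform-independent files: inside a stub's own member code (CEntryStub::Generate(), JSEntryStub::GenerateBody(), and so on) the local Isolate* isolate = masm->isolate() variables disappear because the stub's isolate() accessor is now available, whereas free and static helpers such as GenerateRecordCallTarget(), CreateArrayDispatch() and the NameDictionaryLookupStub lookup helpers have no stub object and keep reading the isolate off the MacroAssembler, forwarding it into any stub they construct. A minimal sketch of the two call-site flavours, again with stand-in types that are assumptions rather than the real V8 interfaces:

    // Sketch only: stand-in MacroAssembler and stub, not the real V8 types.
    class Isolate {};

    class MacroAssembler {
     public:
      explicit MacroAssembler(Isolate* isolate) : isolate_(isolate) {}
      Isolate* isolate() const { return isolate_; }
     private:
      Isolate* isolate_;
    };

    class SomeStubSketch {
     public:
      explicit SomeStubSketch(Isolate* isolate) : isolate_(isolate) {}
      Isolate* isolate() const { return isolate_; }

      // (1) Member code: the stub's own isolate() replaces masm->isolate(),
      //     so the local "Isolate* isolate" variables can be deleted.
      void Generate(MacroAssembler* masm) {
        Isolate* current = isolate();
        (void)masm;
        (void)current;
      }

     private:
      Isolate* isolate_;
    };

    // (2) Static helper: no stub object exists yet, so the isolate still
    //     comes from the assembler and is passed to each stub's constructor.
    void StaticHelperSketch(MacroAssembler* masm) {
      Isolate* isolate = masm->isolate();
      SomeStubSketch stub(isolate);
      stub.Generate(masm);
    }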
index 8d720eaa22e7c9ea8dff388f7352fe4e2e60e754..f1fec43b3077e2375a4263b7d9e647f84af4e3b7 100644 (file)
--- a/src/ic.cc
+++ b/src/ic.cc
@@ -588,7 +588,7 @@ MaybeHandle<Object> LoadIC::Load(Handle<Object> object, Handle<String> name) {
       if (state() == UNINITIALIZED) {
         stub = pre_monomorphic_stub();
       } else if (state() == PREMONOMORPHIC) {
-        FunctionPrototypeStub function_prototype_stub(kind());
+        FunctionPrototypeStub function_prototype_stub(isolate(), kind());
         stub = function_prototype_stub.GetCode(isolate());
       } else if (state() != MEGAMORPHIC) {
         ASSERT(state() != GENERIC);
@@ -851,10 +851,10 @@ Handle<Code> LoadIC::SimpleFieldLoad(int offset,
                                      bool inobject,
                                      Representation representation) {
   if (kind() == Code::LOAD_IC) {
-    LoadFieldStub stub(inobject, offset, representation);
+    LoadFieldStub stub(isolate(), inobject, offset, representation);
     return stub.GetCode(isolate());
   } else {
-    KeyedLoadFieldStub stub(inobject, offset, representation);
+    KeyedLoadFieldStub stub(isolate(), inobject, offset, representation);
     return stub.GetCode(isolate());
   }
 }
@@ -940,10 +940,10 @@ Handle<Code> LoadIC::CompileHandler(LookupResult* lookup,
   if (object->IsStringWrapper() &&
       String::Equals(isolate()->factory()->length_string(), name)) {
     if (kind() == Code::LOAD_IC) {
-      StringLengthStub string_length_stub;
+      StringLengthStub string_length_stub(isolate());
       return string_length_stub.GetCode(isolate());
     } else {
-      KeyedStringLengthStub string_length_stub;
+      KeyedStringLengthStub string_length_stub(isolate());
       return string_length_stub.GetCode(isolate());
     }
   }
@@ -1424,7 +1424,7 @@ Handle<Code> StoreIC::CompileHandler(LookupResult* lookup,
           Handle<PropertyCell> cell(global->GetPropertyCell(lookup), isolate());
           Handle<HeapType> union_type = PropertyCell::UpdatedType(cell, value);
           StoreGlobalStub stub(
-              union_type->IsConstant(), receiver->IsJSGlobalProxy());
+              isolate(), union_type->IsConstant(), receiver->IsJSGlobalProxy());
           Handle<Code> code = stub.GetCodeCopyFromTemplate(
               isolate(), global, cell);
           // TODO(verwaest): Move caching of these NORMAL stubs outside as well.
@@ -2512,14 +2512,14 @@ MaybeHandle<Object> BinaryOpIC::Transition(
     }
 
     // Install the stub with an allocation site.
-    BinaryOpICWithAllocationSiteStub stub(state);
+    BinaryOpICWithAllocationSiteStub stub(isolate(), state);
     target = stub.GetCodeCopyFromTemplate(isolate(), allocation_site);
 
     // Sanity check the trampoline stub.
     ASSERT_EQ(*allocation_site, target->FindFirstAllocationSite());
   } else {
     // Install the generic stub.
-    BinaryOpICStub stub(state);
+    BinaryOpICStub stub(isolate(), state);
     target = stub.GetCode(isolate());
 
     // Sanity check the generic stub.
@@ -2591,7 +2591,7 @@ RUNTIME_FUNCTION(BinaryOpIC_MissWithAllocationSite) {
 
 
 Code* CompareIC::GetRawUninitialized(Isolate* isolate, Token::Value op) {
-  ICCompareStub stub(op, UNINITIALIZED, UNINITIALIZED, UNINITIALIZED);
+  ICCompareStub stub(isolate, op, UNINITIALIZED, UNINITIALIZED, UNINITIALIZED);
   Code* code = NULL;
   CHECK(stub.FindCodeInCache(&code, isolate));
   return code;
@@ -2599,7 +2599,7 @@ Code* CompareIC::GetRawUninitialized(Isolate* isolate, Token::Value op) {
 
 
 Handle<Code> CompareIC::GetUninitialized(Isolate* isolate, Token::Value op) {
-  ICCompareStub stub(op, UNINITIALIZED, UNINITIALIZED, UNINITIALIZED);
+  ICCompareStub stub(isolate, op, UNINITIALIZED, UNINITIALIZED, UNINITIALIZED);
   return stub.GetCode(isolate);
 }
 
@@ -2772,7 +2772,7 @@ Code* CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
   State new_right = NewInputState(previous_right, y);
   State state = TargetState(previous_state, previous_left, previous_right,
                             HasInlinedSmiCode(address()), x, y);
-  ICCompareStub stub(op_, new_left, new_right, state);
+  ICCompareStub stub(isolate(), op_, new_left, new_right, state);
   if (state == KNOWN_OBJECT) {
     stub.set_known_map(
         Handle<Map>(Handle<JSObject>::cast(x)->map(), isolate()));
@@ -2818,7 +2818,9 @@ void CompareNilIC::Clear(Address address,
   if (IsCleared(target)) return;
   ExtraICState state = target->extra_ic_state();
 
-  CompareNilICStub stub(state, HydrogenCodeStub::UNINITIALIZED);
+  CompareNilICStub stub(target->GetIsolate(),
+                        state,
+                        HydrogenCodeStub::UNINITIALIZED);
   stub.ClearState();
 
   Code* code = NULL;
@@ -2841,7 +2843,7 @@ Handle<Object> CompareNilIC::DoCompareNilSlow(Isolate* isolate,
 Handle<Object> CompareNilIC::CompareNil(Handle<Object> object) {
   ExtraICState extra_ic_state = target()->extra_ic_state();
 
-  CompareNilICStub stub(extra_ic_state);
+  CompareNilICStub stub(isolate(), extra_ic_state);
 
   // Extract the current supported types from the patched IC and calculate what
   // types must be supported as a result of the miss.
@@ -2923,7 +2925,7 @@ Builtins::JavaScript BinaryOpIC::TokenToJSBuiltin(Token::Value op) {
 
 
 Handle<Object> ToBooleanIC::ToBoolean(Handle<Object> object) {
-  ToBooleanStub stub(target()->extra_ic_state());
+  ToBooleanStub stub(isolate(), target()->extra_ic_state());
   bool to_boolean_value = stub.UpdateStatus(object);
   Handle<Code> code = stub.GetCode(isolate());
   set_target(*code);
index 4f60c809533c9e9c0f6597f9c694b6ea0a9a62eb..36eee69380de6427cfcc82e0c29052a7ab950187 100644 (file)
--- a/src/stub-cache.cc
+++ b/src/stub-cache.cc
@@ -1188,13 +1188,14 @@ Handle<Code> KeyedLoadStubCompiler::CompileLoadElement(
       receiver_map->has_external_array_elements() ||
       receiver_map->has_fixed_typed_array_elements()) {
     Handle<Code> stub = KeyedLoadFastElementStub(
+        isolate(),
         receiver_map->instance_type() == JS_ARRAY_TYPE,
         elements_kind).GetCode(isolate());
     __ DispatchMap(receiver(), scratch1(), receiver_map, stub, DO_SMI_CHECK);
   } else {
     Handle<Code> stub = FLAG_compiled_keyed_dictionary_loads
-        ? KeyedLoadDictionaryElementStub().GetCode(isolate())
-        : KeyedLoadDictionaryElementPlatformStub().GetCode(isolate());
+        ? KeyedLoadDictionaryElementStub(isolate()).GetCode(isolate())
+        : KeyedLoadDictionaryElementPlatformStub(isolate()).GetCode(isolate());
     __ DispatchMap(receiver(), scratch1(), receiver_map, stub, DO_SMI_CHECK);
   }
 
@@ -1214,11 +1215,13 @@ Handle<Code> KeyedStoreStubCompiler::CompileStoreElement(
       receiver_map->has_external_array_elements() ||
       receiver_map->has_fixed_typed_array_elements()) {
     stub = KeyedStoreFastElementStub(
+        isolate(),
         is_jsarray,
         elements_kind,
         store_mode()).GetCode(isolate());
   } else {
-    stub = KeyedStoreElementStub(is_jsarray,
+    stub = KeyedStoreElementStub(isolate(),
+                                 is_jsarray,
                                  elements_kind,
                                  store_mode()).GetCode(isolate());
   }
@@ -1314,13 +1317,15 @@ void KeyedLoadStubCompiler::CompileElementHandlers(MapHandleList* receiver_maps,
           IsExternalArrayElementsKind(elements_kind) ||
           IsFixedTypedArrayElementsKind(elements_kind)) {
         cached_stub =
-            KeyedLoadFastElementStub(is_js_array,
+            KeyedLoadFastElementStub(isolate(),
+                                     is_js_array,
                                      elements_kind).GetCode(isolate());
       } else if (elements_kind == SLOPPY_ARGUMENTS_ELEMENTS) {
         cached_stub = isolate()->builtins()->KeyedLoadIC_SloppyArguments();
       } else {
         ASSERT(elements_kind == DICTIONARY_ELEMENTS);
-        cached_stub = KeyedLoadDictionaryElementStub().GetCode(isolate());
+        cached_stub =
+            KeyedLoadDictionaryElementStub(isolate()).GetCode(isolate());
       }
     }
 
@@ -1349,6 +1354,7 @@ Handle<Code> KeyedStoreStubCompiler::CompileStoreElementPolymorphic(
     ElementsKind elements_kind = receiver_map->elements_kind();
     if (!transitioned_map.is_null()) {
       cached_stub = ElementsTransitionAndStoreStub(
+          isolate(),
           elements_kind,
           transitioned_map->elements_kind(),
           is_js_array,
@@ -1360,11 +1366,13 @@ Handle<Code> KeyedStoreStubCompiler::CompileStoreElementPolymorphic(
           receiver_map->has_external_array_elements() ||
           receiver_map->has_fixed_typed_array_elements()) {
         cached_stub = KeyedStoreFastElementStub(
+            isolate(),
             is_js_array,
             elements_kind,
             store_mode()).GetCode(isolate());
       } else {
         cached_stub = KeyedStoreElementStub(
+            isolate(),
             is_js_array,
             elements_kind,
             store_mode()).GetCode(isolate());
diff --git a/src/type-info.cc b/src/type-info.cc
index ce54504152874e31344449284c294519fe9a9059..71236f7d9c3c81af556ec3c0cd54a0be7b51956d 100644 (file)
--- a/src/type-info.cc
+++ b/src/type-info.cc
@@ -228,7 +228,7 @@ void TypeFeedbackOracle::CompareType(TypeFeedbackId id,
     CompareIC::StubInfoToType(
         stub_minor_key, left_type, right_type, combined_type, map, zone());
   } else if (code->is_compare_nil_ic_stub()) {
-    CompareNilICStub stub(code->extra_ic_state());
+    CompareNilICStub stub(isolate(), code->extra_ic_state());
     *combined_type = stub.GetType(zone(), map);
     *left_type = *right_type = stub.GetInputType(zone(), map);
   }
@@ -286,7 +286,7 @@ void TypeFeedbackOracle::PropertyReceiverTypes(
     TypeFeedbackId id, Handle<String> name,
     SmallMapList* receiver_types, bool* is_prototype) {
   receiver_types->Clear();
-  FunctionPrototypeStub proto_stub(Code::LOAD_IC);
+  FunctionPrototypeStub proto_stub(isolate(), Code::LOAD_IC);
   *is_prototype = LoadIsStub(id, &proto_stub);
   if (!*is_prototype) {
     Code::Flags flags = Code::ComputeHandlerFlags(Code::LOAD_IC);
diff --git a/src/x64/builtins-x64.cc b/src/x64/builtins-x64.cc
index 19e7b533c9d425ed5062526818ae7f86e679db8d..a08d32645d3b0b9338cffac75728305f7ee23e13 100644 (file)
--- a/src/x64/builtins-x64.cc
+++ b/src/x64/builtins-x64.cc
@@ -600,7 +600,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
       // No type feedback cell is available
       __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
       // Expects rdi to hold function pointer.
-      CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
+      CallConstructStub stub(masm->isolate(), NO_CALL_FUNCTION_FLAGS);
       __ CallStub(&stub);
     } else {
       ParameterCount actual(rax);
diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc
index 2e2b975c79108b6d29705f6662072b2ef7185e1c..9b350adeea894569798b2b210c26c287d03086cb 100644 (file)
--- a/src/x64/code-stubs-x64.cc
+++ b/src/x64/code-stubs-x64.cc
@@ -479,10 +479,9 @@ void CallDescriptors::InitializeForIsolate(Isolate* isolate) {
 
 void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) {
   // Update the static counter each time a new code stub is generated.
-  Isolate* isolate = masm->isolate();
-  isolate->counters()->code_stubs()->Increment();
+  isolate()->counters()->code_stubs()->Increment();
 
-  CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate);
+  CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate());
   int param_count = descriptor->register_param_count_;
   {
     // Call the runtime system in a fresh internal frame.
@@ -506,11 +505,11 @@ void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
   const int argument_count = 1;
   __ PrepareCallCFunction(argument_count);
   __ LoadAddress(arg_reg_1,
-                 ExternalReference::isolate_address(masm->isolate()));
+                 ExternalReference::isolate_address(isolate()));
 
   AllowExternalCallThatCantCauseGC scope(masm);
   __ CallCFunction(
-      ExternalReference::store_buffer_overflow_function(masm->isolate()),
+      ExternalReference::store_buffer_overflow_function(isolate()),
       argument_count);
   __ PopCallerSaved(save_doubles_);
   __ ret(0);
@@ -871,7 +870,7 @@ void MathPowStub::Generate(MacroAssembler* masm) {
   __ Cvtlsi2sd(double_exponent, exponent);
 
   // Returning or bailing out.
-  Counters* counters = masm->isolate()->counters();
+  Counters* counters = isolate()->counters();
   if (exponent_type_ == ON_STACK) {
     // The arguments are still on the stack.
     __ bind(&call_runtime);
@@ -893,7 +892,7 @@ void MathPowStub::Generate(MacroAssembler* masm) {
       AllowExternalCallThatCantCauseGC scope(masm);
       __ PrepareCallCFunction(2);
       __ CallCFunction(
-          ExternalReference::power_double_double_function(masm->isolate()), 2);
+          ExternalReference::power_double_double_function(isolate()), 2);
     }
     // Return value is in xmm0.
     __ movsd(double_result, xmm0);
@@ -914,7 +913,7 @@ void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
     //  -- rdx    : receiver
     //  -- rsp[0] : return address
     // -----------------------------------
-    __ Cmp(rax, masm->isolate()->factory()->prototype_string());
+    __ Cmp(rax, isolate()->factory()->prototype_string());
     __ j(not_equal, &miss);
     receiver = rdx;
   } else {
@@ -1000,7 +999,7 @@ void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
   //  rbx: the mapped parameter count (untagged)
   //  rax: the allocated object (tagged).
 
-  Factory* factory = masm->isolate()->factory();
+  Factory* factory = isolate()->factory();
 
   StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER);
   __ SmiToInteger64(rbx, args.GetArgumentOperand(2));
@@ -1363,11 +1362,10 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
                               ARGUMENTS_DONT_CONTAIN_RECEIVER);
   Label runtime;
   // Ensure that a RegExp stack is allocated.
-  Isolate* isolate = masm->isolate();
   ExternalReference address_of_regexp_stack_memory_address =
-      ExternalReference::address_of_regexp_stack_memory_address(isolate);
+      ExternalReference::address_of_regexp_stack_memory_address(isolate());
   ExternalReference address_of_regexp_stack_memory_size =
-      ExternalReference::address_of_regexp_stack_memory_size(isolate);
+      ExternalReference::address_of_regexp_stack_memory_size(isolate());
   __ Load(kScratchRegister, address_of_regexp_stack_memory_size);
   __ testp(kScratchRegister, kScratchRegister);
   __ j(zero, &runtime);
@@ -1519,7 +1517,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
   // rcx: encoding of subject string (1 if ASCII 0 if two_byte);
   // r11: code
   // All checks done. Now push arguments for native regexp code.
-  Counters* counters = masm->isolate()->counters();
+  Counters* counters = isolate()->counters();
   __ IncrementCounter(counters->regexp_entry_native(), 1);
 
   // Isolates: note we add an additional parameter here (isolate pointer).
@@ -1530,7 +1528,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
 
   // Argument 9: Pass current isolate address.
   __ LoadAddress(kScratchRegister,
-                 ExternalReference::isolate_address(masm->isolate()));
+                 ExternalReference::isolate_address(isolate()));
   __ movq(Operand(rsp, (argument_slots_on_stack - 1) * kRegisterSize),
           kScratchRegister);
 
@@ -1556,8 +1554,8 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
 #endif
 
   // Argument 5: static offsets vector buffer.
-  __ LoadAddress(r8,
-                 ExternalReference::address_of_static_offsets_vector(isolate));
+  __ LoadAddress(
+      r8, ExternalReference::address_of_static_offsets_vector(isolate()));
   // Argument 5 passed in r8 on Linux and on the stack on Windows.
 #ifdef _WIN64
   __ movq(Operand(rsp, (argument_slots_on_stack - 5) * kRegisterSize), r8);
@@ -1682,8 +1680,8 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
                       kDontSaveFPRegs);
 
   // Get the static offsets vector filled by the native regexp code.
-  __ LoadAddress(rcx,
-                 ExternalReference::address_of_static_offsets_vector(isolate));
+  __ LoadAddress(
+      rcx, ExternalReference::address_of_static_offsets_vector(isolate()));
 
   // rbx: last_match_info backing store (FixedArray)
   // rcx: offsets vector
@@ -1716,7 +1714,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
   // haven't created the exception yet. Handle that in the runtime system.
   // TODO(592): Rerunning the RegExp to get the stack overflow exception.
   ExternalReference pending_exception_address(
-      Isolate::kPendingExceptionAddress, isolate);
+      Isolate::kPendingExceptionAddress, isolate());
   Operand pending_exception_operand =
       masm->ExternalOperand(pending_exception_address, rbx);
   __ movp(rax, pending_exception_operand);
@@ -1829,7 +1827,7 @@ static void BranchIfNotInternalizedString(MacroAssembler* masm,
 void ICCompareStub::GenerateGeneric(MacroAssembler* masm) {
   Label check_unequal_objects, done;
   Condition cc = GetCondition();
-  Factory* factory = masm->isolate()->factory();
+  Factory* factory = isolate()->factory();
 
   Label miss;
   CheckInputType(masm, rdx, left_, &miss);
@@ -2163,7 +2161,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
       __ Push(rdx);
       __ Push(rbx);
 
-      CreateAllocationSiteStub create_stub;
+      CreateAllocationSiteStub create_stub(isolate);
       __ CallStub(&create_stub);
 
       __ Pop(rbx);
@@ -2202,7 +2200,6 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
   // rdx : (only if rbx is not the megamorphic symbol) slot in feedback
   //       vector (Smi)
   // rdi : the function to call
-  Isolate* isolate = masm->isolate();
   Label slow, non_function, wrap, cont;
   StackArgumentsAccessor args(rsp, argc_);
 
@@ -2268,7 +2265,7 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
       __ SmiToInteger32(rdx, rdx);
       __ Move(FieldOperand(rbx, rdx, times_pointer_size,
                            FixedArray::kHeaderSize),
-              TypeFeedbackInfo::MegamorphicSentinel(isolate));
+              TypeFeedbackInfo::MegamorphicSentinel(isolate()));
       __ Integer32ToSmi(rdx, rdx);
     }
     // Check for function proxy.
@@ -2282,7 +2279,7 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
     __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
     {
       Handle<Code> adaptor =
-        masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
+        isolate()->builtins()->ArgumentsAdaptorTrampoline();
       __ jmp(adaptor, RelocInfo::CODE_TARGET);
     }
 
@@ -2294,7 +2291,7 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
     __ Set(rbx, 0);
     __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
     Handle<Code> adaptor =
-        isolate->builtins()->ArgumentsAdaptorTrampoline();
+        isolate()->builtins()->ArgumentsAdaptorTrampoline();
     __ Jump(adaptor, RelocInfo::CODE_TARGET);
   }
 
@@ -2374,7 +2371,7 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
   __ bind(&do_call);
   // Set expected number of arguments to zero (not changing rax).
   __ Set(rbx, 0);
-  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
+  __ Jump(isolate()->builtins()->ArgumentsAdaptorTrampoline(),
           RelocInfo::CODE_TARGET);
 }
 
@@ -2401,9 +2398,9 @@ void CodeStub::GenerateFPStubs(Isolate* isolate) {
 
 
 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
-  CEntryStub stub(1, kDontSaveFPRegs);
+  CEntryStub stub(isolate, 1, kDontSaveFPRegs);
   stub.GetCode(isolate);
-  CEntryStub save_doubles(1, kSaveFPRegs);
+  CEntryStub save_doubles(isolate, 1, kSaveFPRegs);
   save_doubles.GetCode(isolate);
 }
 
@@ -2450,7 +2447,7 @@ void CEntryStub::Generate(MacroAssembler* masm) {
     // Return result in single register (rax).
     __ movp(rcx, r14);  // argc.
     __ movp(rdx, r15);  // argv.
-    __ Move(r8, ExternalReference::isolate_address(masm->isolate()));
+    __ Move(r8, ExternalReference::isolate_address(isolate()));
   } else {
     ASSERT_EQ(2, result_size_);
     // Pass a pointer to the result location as the first argument.
@@ -2458,14 +2455,14 @@ void CEntryStub::Generate(MacroAssembler* masm) {
     // Pass a pointer to the Arguments object as the second argument.
     __ movp(rdx, r14);  // argc.
     __ movp(r8, r15);   // argv.
-    __ Move(r9, ExternalReference::isolate_address(masm->isolate()));
+    __ Move(r9, ExternalReference::isolate_address(isolate()));
   }
 
 #else  // _WIN64
   // GCC passes arguments in rdi, rsi, rdx, rcx, r8, r9.
   __ movp(rdi, r14);  // argc.
   __ movp(rsi, r15);  // argv.
-  __ Move(rdx, ExternalReference::isolate_address(masm->isolate()));
+  __ Move(rdx, ExternalReference::isolate_address(isolate()));
 #endif
   __ call(rbx);
   // Result is in rax - do not destroy this register!
@@ -2498,7 +2495,7 @@ void CEntryStub::Generate(MacroAssembler* masm) {
   __ j(equal, &exception_returned);
 
   ExternalReference pending_exception_address(
-      Isolate::kPendingExceptionAddress, masm->isolate());
+      Isolate::kPendingExceptionAddress, isolate());
 
   // Check that there is no pending exception, otherwise we
   // should have returned the exception sentinel.
@@ -2595,17 +2592,15 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
     __ InitializeRootRegister();
   }
 
-  Isolate* isolate = masm->isolate();
-
   // Save copies of the top frame descriptor on the stack.
-  ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate);
+  ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate());
   {
     Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
     __ Push(c_entry_fp_operand);
   }
 
   // If this is the outermost JS call, set js_entry_sp value.
-  ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate);
+  ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate());
   __ Load(rax, js_entry_sp);
   __ testp(rax, rax);
   __ j(not_zero, &not_outermost_js);
@@ -2626,7 +2621,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
   // Caught exception: Store result (exception) in the pending exception
   // field in the JSEnv and return a failure sentinel.
   ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
-                                      isolate);
+                                      isolate());
   __ Store(pending_exception, rax);
   __ LoadRoot(rax, Heap::kExceptionRootIndex);
   __ jmp(&exit);
@@ -2650,10 +2645,10 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
   // at the time this code is generated.
   if (is_construct) {
     ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
-                                      isolate);
+                                      isolate());
     __ Load(rax, construct_entry);
   } else {
-    ExternalReference entry(Builtins::kJSEntryTrampoline, isolate);
+    ExternalReference entry(Builtins::kJSEntryTrampoline, isolate());
     __ Load(rax, entry);
   }
   __ leap(kScratchRegister, FieldOperand(rax, Code::kHeaderSize));
@@ -3177,7 +3172,7 @@ void SubStringStub::Generate(MacroAssembler* masm) {
   // Longer than original string's length or negative: unsafe arguments.
   __ j(above, &runtime);
   // Return original string.
-  Counters* counters = masm->isolate()->counters();
+  Counters* counters = isolate()->counters();
   __ IncrementCounter(counters->sub_string_native(), 1);
   __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
   __ bind(&not_original_string);
@@ -3522,7 +3517,7 @@ void StringCompareStub::Generate(MacroAssembler* masm) {
   __ cmpp(rdx, rax);
   __ j(not_equal, &not_same, Label::kNear);
   __ Move(rax, Smi::FromInt(EQUAL));
-  Counters* counters = masm->isolate()->counters();
+  Counters* counters = isolate()->counters();
   __ IncrementCounter(counters->string_compare_native(), 1);
   __ ret(2 * kPointerSize);
 
@@ -3552,25 +3547,24 @@ void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
   //  -- rax    : right
   //  -- rsp[0] : return address
   // -----------------------------------
-  Isolate* isolate = masm->isolate();
 
   // Load rcx with the allocation site.  We stick an undefined dummy value here
   // and replace it with the real allocation site later when we instantiate this
   // stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
-  __ Move(rcx, handle(isolate->heap()->undefined_value()));
+  __ Move(rcx, handle(isolate()->heap()->undefined_value()));
 
   // Make sure that we actually patched the allocation site.
   if (FLAG_debug_code) {
     __ testb(rcx, Immediate(kSmiTagMask));
     __ Assert(not_equal, kExpectedAllocationSite);
     __ Cmp(FieldOperand(rcx, HeapObject::kMapOffset),
-           isolate->factory()->allocation_site_map());
+           isolate()->factory()->allocation_site_map());
     __ Assert(equal, kExpectedAllocationSite);
   }
 
   // Tail call into the stub that handles binary operations with allocation
   // sites.
-  BinaryOpWithAllocationSiteStub stub(state_);
+  BinaryOpWithAllocationSiteStub stub(isolate(), state_);
   __ TailCallStub(&stub);
 }
 
@@ -3616,7 +3610,7 @@ void ICCompareStub::GenerateNumbers(MacroAssembler* masm) {
   // Load left and right operand.
   Label done, left, left_smi, right_smi;
   __ JumpIfSmi(rax, &right_smi, Label::kNear);
-  __ CompareMap(rax, masm->isolate()->factory()->heap_number_map());
+  __ CompareMap(rax, isolate()->factory()->heap_number_map());
   __ j(not_equal, &maybe_undefined1, Label::kNear);
   __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
   __ jmp(&left, Label::kNear);
@@ -3626,7 +3620,7 @@ void ICCompareStub::GenerateNumbers(MacroAssembler* masm) {
 
   __ bind(&left);
   __ JumpIfSmi(rdx, &left_smi, Label::kNear);
-  __ CompareMap(rdx, masm->isolate()->factory()->heap_number_map());
+  __ CompareMap(rdx, isolate()->factory()->heap_number_map());
   __ j(not_equal, &maybe_undefined2, Label::kNear);
   __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
   __ jmp(&done);
@@ -3651,13 +3645,13 @@ void ICCompareStub::GenerateNumbers(MacroAssembler* masm) {
 
   __ bind(&unordered);
   __ bind(&generic_stub);
-  ICCompareStub stub(op_, CompareIC::GENERIC, CompareIC::GENERIC,
+  ICCompareStub stub(isolate(), op_, CompareIC::GENERIC, CompareIC::GENERIC,
                      CompareIC::GENERIC);
-  __ jmp(stub.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
+  __ jmp(stub.GetCode(isolate()), RelocInfo::CODE_TARGET);
 
   __ bind(&maybe_undefined1);
   if (Token::IsOrderedRelationalCompareOp(op_)) {
-    __ Cmp(rax, masm->isolate()->factory()->undefined_value());
+    __ Cmp(rax, isolate()->factory()->undefined_value());
     __ j(not_equal, &miss);
     __ JumpIfSmi(rdx, &unordered);
     __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx);
@@ -3667,7 +3661,7 @@ void ICCompareStub::GenerateNumbers(MacroAssembler* masm) {
 
   __ bind(&maybe_undefined2);
   if (Token::IsOrderedRelationalCompareOp(op_)) {
-    __ Cmp(rdx, masm->isolate()->factory()->undefined_value());
+    __ Cmp(rdx, isolate()->factory()->undefined_value());
     __ j(equal, &unordered);
   }
 
@@ -3893,7 +3887,7 @@ void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
   {
     // Call the runtime system in a fresh internal frame.
     ExternalReference miss =
-        ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate());
+        ExternalReference(IC_Utility(IC::kCompareIC_Miss), isolate());
 
     FrameScope scope(masm, StackFrame::INTERNAL);
     __ Push(rdx);
@@ -3966,7 +3960,8 @@ void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
     __ bind(&good);
   }
 
-  NameDictionaryLookupStub stub(properties, r0, r0, NEGATIVE_LOOKUP);
+  NameDictionaryLookupStub stub(masm->isolate(), properties, r0, r0,
+                                NEGATIVE_LOOKUP);
   __ Push(Handle<Object>(name));
   __ Push(Immediate(name->Hash()));
   __ CallStub(&stub);
@@ -4016,7 +4011,8 @@ void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
     __ j(equal, done);
   }
 
-  NameDictionaryLookupStub stub(elements, r0, r1, POSITIVE_LOOKUP);
+  NameDictionaryLookupStub stub(masm->isolate(), elements, r0, r1,
+                                POSITIVE_LOOKUP);
   __ Push(name);
   __ movl(r0, FieldOperand(name, Name::kHashFieldOffset));
   __ shrl(r0, Immediate(Name::kHashShift));
@@ -4077,7 +4073,7 @@ void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
                              times_pointer_size,
                              kElementsStartOffset - kHeapObjectTag));
 
-    __ Cmp(scratch, masm->isolate()->factory()->undefined_value());
+    __ Cmp(scratch, isolate()->factory()->undefined_value());
     __ j(equal, &not_in_dictionary);
 
     // Stop if found the property.
@@ -4120,9 +4116,9 @@ void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
 
 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
     Isolate* isolate) {
-  StoreBufferOverflowStub stub1(kDontSaveFPRegs);
+  StoreBufferOverflowStub stub1(isolate, kDontSaveFPRegs);
   stub1.GetCode(isolate);
-  StoreBufferOverflowStub stub2(kSaveFPRegs);
+  StoreBufferOverflowStub stub2(isolate, kSaveFPRegs);
   stub2.GetCode(isolate);
 }
 
@@ -4222,14 +4218,13 @@ void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
   // TODO(gc) Can we just set address arg2 in the beginning?
   __ Move(arg_reg_2, address);
   __ LoadAddress(arg_reg_3,
-                 ExternalReference::isolate_address(masm->isolate()));
+                 ExternalReference::isolate_address(isolate()));
   int argument_count = 3;
 
   AllowExternalCallThatCantCauseGC scope(masm);
   __ PrepareCallCFunction(argument_count);
   __ CallCFunction(
-      ExternalReference::incremental_marking_record_write_function(
-          masm->isolate()),
+      ExternalReference::incremental_marking_record_write_function(isolate()),
       argument_count);
   regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_);
 }
@@ -4407,8 +4402,8 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
 
 
 void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
-  CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
-  __ Call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
+  CEntryStub ces(isolate(), 1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
+  __ Call(ces.GetCode(isolate()), RelocInfo::CODE_TARGET);
   int parameter_count_offset =
       StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
   __ movp(rbx, MemOperand(rbp, parameter_count_offset));
@@ -4424,7 +4419,7 @@ void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
 
 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
   if (masm->isolate()->function_entry_hook() != NULL) {
-    ProfileEntryHookStub stub;
+    ProfileEntryHookStub stub(masm->isolate());
     masm->CallStub(&stub);
   }
 }
@@ -4449,7 +4444,7 @@ void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
   masm->PushCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2);
 
   // Call the entry hook function.
-  __ Move(rax, FUNCTION_ADDR(masm->isolate()->function_entry_hook()),
+  __ Move(rax, FUNCTION_ADDR(isolate()->function_entry_hook()),
           Assembler::RelocInfoNone());
 
   AllowExternalCallThatCantCauseGC scope(masm);
@@ -4471,7 +4466,7 @@ template<class T>
 static void CreateArrayDispatch(MacroAssembler* masm,
                                 AllocationSiteOverrideMode mode) {
   if (mode == DISABLE_ALLOCATION_SITES) {
-    T stub(GetInitialFastElementsKind(), mode);
+    T stub(masm->isolate(), GetInitialFastElementsKind(), mode);
     __ TailCallStub(&stub);
   } else if (mode == DONT_OVERRIDE) {
     int last_index = GetSequenceIndexFromFastElementsKind(
@@ -4481,7 +4476,7 @@ static void CreateArrayDispatch(MacroAssembler* masm,
       ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
       __ cmpl(rdx, Immediate(kind));
       __ j(not_equal, &next);
-      T stub(kind);
+      T stub(masm->isolate(), kind);
       __ TailCallStub(&stub);
       __ bind(&next);
     }
@@ -4530,12 +4525,14 @@ static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
     ElementsKind initial = GetInitialFastElementsKind();
     ElementsKind holey_initial = GetHoleyElementsKind(initial);
 
-    ArraySingleArgumentConstructorStub stub_holey(holey_initial,
+    ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
+                                                  holey_initial,
                                                   DISABLE_ALLOCATION_SITES);
     __ TailCallStub(&stub_holey);
 
     __ bind(&normal_sequence);
-    ArraySingleArgumentConstructorStub stub(initial,
+    ArraySingleArgumentConstructorStub stub(masm->isolate(),
+                                            initial,
                                             DISABLE_ALLOCATION_SITES);
     __ TailCallStub(&stub);
   } else if (mode == DONT_OVERRIDE) {
@@ -4565,7 +4562,7 @@ static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
       ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
       __ cmpl(rdx, Immediate(kind));
       __ j(not_equal, &next);
-      ArraySingleArgumentConstructorStub stub(kind);
+      ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
       __ TailCallStub(&stub);
       __ bind(&next);
     }
@@ -4584,10 +4581,10 @@ static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
       TERMINAL_FAST_ELEMENTS_KIND);
   for (int i = 0; i <= to_index; ++i) {
     ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
-    T stub(kind);
+    T stub(isolate, kind);
     stub.GetCode(isolate);
     if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) {
-      T stub1(kind, DISABLE_ALLOCATION_SITES);
+      T stub1(isolate, kind, DISABLE_ALLOCATION_SITES);
       stub1.GetCode(isolate);
     }
   }
@@ -4609,11 +4606,11 @@ void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime(
   ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
   for (int i = 0; i < 2; i++) {
     // For internal arrays we only need a few things
-    InternalArrayNoArgumentConstructorStub stubh1(kinds[i]);
+    InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]);
     stubh1.GetCode(isolate);
-    InternalArraySingleArgumentConstructorStub stubh2(kinds[i]);
+    InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
     stubh2.GetCode(isolate);
-    InternalArrayNArgumentsConstructorStub stubh3(kinds[i]);
+    InternalArrayNArgumentsConstructorStub stubh3(isolate, kinds[i]);
     stubh3.GetCode(isolate);
   }
 }
@@ -4697,7 +4694,7 @@ void InternalArrayConstructorStub::GenerateCase(
 
   __ testp(rax, rax);
   __ j(not_zero, &not_zero_case);
-  InternalArrayNoArgumentConstructorStub stub0(kind);
+  InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
   __ TailCallStub(&stub0);
 
   __ bind(&not_zero_case);
@@ -4713,16 +4710,16 @@ void InternalArrayConstructorStub::GenerateCase(
     __ j(zero, &normal_sequence);
 
     InternalArraySingleArgumentConstructorStub
-        stub1_holey(GetHoleyElementsKind(kind));
+        stub1_holey(isolate(), GetHoleyElementsKind(kind));
     __ TailCallStub(&stub1_holey);
   }
 
   __ bind(&normal_sequence);
-  InternalArraySingleArgumentConstructorStub stub1(kind);
+  InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
   __ TailCallStub(&stub1);
 
   __ bind(&not_one_case);
-  InternalArrayNArgumentsConstructorStub stubN(kind);
+  InternalArrayNArgumentsConstructorStub stubN(isolate(), kind);
   __ TailCallStub(&stubN);
 }
 
@@ -4838,7 +4835,7 @@ void CallApiFunctionStub::Generate(MacroAssembler* masm) {
   __ Push(scratch);
   // isolate
   __ Move(scratch,
-          ExternalReference::isolate_address(masm->isolate()));
+          ExternalReference::isolate_address(isolate()));
   __ Push(scratch);
   // holder
   __ Push(holder);
diff --git a/src/x64/code-stubs-x64.h b/src/x64/code-stubs-x64.h
index 8c8ab691ac9305402f21fef2489f7076a1ed9a95..773b3d561a235bf7837a9354f78710414348fe0a 100644 (file)
--- a/src/x64/code-stubs-x64.h
+++ b/src/x64/code-stubs-x64.h
@@ -38,8 +38,8 @@ void ArrayNativeCode(MacroAssembler* masm, Label* call_generic_code);
 
 class StoreBufferOverflowStub: public PlatformCodeStub {
  public:
-  explicit StoreBufferOverflowStub(SaveFPRegsMode save_fp)
-      : save_doubles_(save_fp) { }
+  StoreBufferOverflowStub(Isolate* isolate, SaveFPRegsMode save_fp)
+      : PlatformCodeStub(isolate), save_doubles_(save_fp) { }
 
   void Generate(MacroAssembler* masm);
 
@@ -86,7 +86,7 @@ class StringHelper : public AllStatic {
 
 class SubStringStub: public PlatformCodeStub {
  public:
-  SubStringStub() {}
+  explicit SubStringStub(Isolate* isolate) : PlatformCodeStub(isolate) {}
 
  private:
   Major MajorKey() { return SubString; }
@@ -98,7 +98,7 @@ class SubStringStub: public PlatformCodeStub {
 
 class StringCompareStub: public PlatformCodeStub {
  public:
-  StringCompareStub() {}
+  explicit StringCompareStub(Isolate* isolate) : PlatformCodeStub(isolate) {}
 
   // Compares two flat ASCII strings and returns result in rax.
   static void GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
@@ -137,11 +137,16 @@ class NameDictionaryLookupStub: public PlatformCodeStub {
  public:
   enum LookupMode { POSITIVE_LOOKUP, NEGATIVE_LOOKUP };
 
-  NameDictionaryLookupStub(Register dictionary,
+  NameDictionaryLookupStub(Isolate* isolate,
+                           Register dictionary,
                            Register result,
                            Register index,
                            LookupMode mode)
-      : dictionary_(dictionary), result_(result), index_(index), mode_(mode) { }
+      : PlatformCodeStub(isolate),
+        dictionary_(dictionary),
+        result_(result),
+        index_(index),
+        mode_(mode) { }
 
   void Generate(MacroAssembler* masm);
 
@@ -197,12 +202,14 @@ class NameDictionaryLookupStub: public PlatformCodeStub {
 
 class RecordWriteStub: public PlatformCodeStub {
  public:
-  RecordWriteStub(Register object,
+  RecordWriteStub(Isolate* isolate,
+                  Register object,
                   Register value,
                   Register address,
                   RememberedSetAction remembered_set_action,
                   SaveFPRegsMode fp_mode)
-      : object_(object),
+      : PlatformCodeStub(isolate),
+        object_(object),
         value_(value),
         address_(address),
         remembered_set_action_(remembered_set_action),
diff --git a/src/x64/debug-x64.cc b/src/x64/debug-x64.cc
index a65dd29286d3b91c5fafe790464732d726083885..4fda37c196f8b3a40013915584c63ed6d54d46e6 100644 (file)
--- a/src/x64/debug-x64.cc
+++ b/src/x64/debug-x64.cc
@@ -134,7 +134,7 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm,
     __ Set(rax, 0);  // No arguments (argc == 0).
     __ Move(rbx, ExternalReference::debug_break(masm->isolate()));
 
-    CEntryStub ceb(1);
+    CEntryStub ceb(masm->isolate(), 1);
     __ CallStub(&ceb);
 
     // Restore the register values from the expression stack.
diff --git a/src/x64/full-codegen-x64.cc b/src/x64/full-codegen-x64.cc
index 44ffc29b4e8980532632fd99c426f9415da3052b..dbb32b23390a8cb4dc455ecf26be871be3f33777 100644 (file)
--- a/src/x64/full-codegen-x64.cc
+++ b/src/x64/full-codegen-x64.cc
@@ -230,7 +230,7 @@ void FullCodeGenerator::Generate() {
       __ Push(info->scope()->GetScopeInfo());
       __ CallRuntime(Runtime::kHiddenNewGlobalContext, 2);
     } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
-      FastNewContextStub stub(heap_slots);
+      FastNewContextStub stub(isolate(), heap_slots);
       __ CallStub(&stub);
     } else {
       __ Push(rdi);
@@ -291,7 +291,7 @@ void FullCodeGenerator::Generate() {
     } else {
       type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
     }
-    ArgumentsAccessStub stub(type);
+    ArgumentsAccessStub stub(isolate(), type);
     __ CallStub(&stub);
 
     SetVar(arguments, rax, rbx, rdx);
@@ -1337,7 +1337,9 @@ void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
       !pretenure &&
       scope()->is_function_scope() &&
       info->num_literals() == 0) {
-    FastNewClosureStub stub(info->strict_mode(), info->is_generator());
+    FastNewClosureStub stub(isolate(),
+                            info->strict_mode(),
+                            info->is_generator());
     __ Move(rbx, info);
     __ CallStub(&stub);
   } else {
@@ -1671,7 +1673,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
     __ Move(rbx, Smi::FromInt(expr->literal_index()));
     __ Move(rcx, constant_properties);
     __ Move(rdx, Smi::FromInt(flags));
-    FastCloneShallowObjectStub stub(properties_count);
+    FastCloneShallowObjectStub stub(isolate(), properties_count);
     __ CallStub(&stub);
   }
 
@@ -1806,6 +1808,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
     __ Move(rbx, Smi::FromInt(expr->literal_index()));
     __ Move(rcx, constant_elements);
     FastCloneShallowArrayStub stub(
+        isolate(),
         FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
         allocation_site_mode,
         length);
@@ -1834,7 +1837,9 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
     __ movp(rax, FieldOperand(rbx, JSFunction::kLiteralsOffset));
     __ Move(rbx, Smi::FromInt(expr->literal_index()));
     __ Move(rcx, constant_elements);
-    FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
+    FastCloneShallowArrayStub stub(isolate(),
+                                   mode,
+                                   allocation_site_mode, length);
     __ CallStub(&stub);
   }
 
@@ -1871,7 +1876,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
     } else {
       // Store the subexpression value in the array's elements.
       __ Move(rcx, Smi::FromInt(i));
-      StoreArrayLiteralElementStub stub;
+      StoreArrayLiteralElementStub stub(isolate());
       __ CallStub(&stub);
     }
 
@@ -2121,7 +2126,7 @@ void FullCodeGenerator::VisitYield(Yield* expr) {
       CallIC(ic, TypeFeedbackId::None());
       __ movp(rdi, rax);
       __ movp(Operand(rsp, 2 * kPointerSize), rdi);
-      CallFunctionStub stub(1, CALL_AS_METHOD);
+      CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
       __ CallStub(&stub);
 
       __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
@@ -2329,7 +2334,7 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
 
   __ bind(&stub_call);
   __ movp(rax, rcx);
-  BinaryOpICStub stub(op, mode);
+  BinaryOpICStub stub(isolate(), op, mode);
   CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
   patch_site.EmitPatchInfo();
   __ jmp(&done, Label::kNear);
@@ -2377,7 +2382,7 @@ void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
                                      Token::Value op,
                                      OverwriteMode mode) {
   __ Pop(rdx);
-  BinaryOpICStub stub(op, mode);
+  BinaryOpICStub stub(isolate(), op, mode);
   JumpPatchSite patch_site(masm_);    // unbound, signals no inlined smi code.
   CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
   patch_site.EmitPatchInfo();
@@ -2617,7 +2622,7 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr) {
 
   // Record source position for debugger.
   SetSourcePosition(expr->position());
-  CallFunctionStub stub(arg_count, flags);
+  CallFunctionStub stub(isolate(), arg_count, flags);
   __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
   __ CallStub(&stub);
 
@@ -2659,7 +2664,7 @@ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
 
   // Record source position for debugger.
   SetSourcePosition(expr->position());
-  CallFunctionStub stub(arg_count, CALL_AS_METHOD);
+  CallFunctionStub stub(isolate(), arg_count, CALL_AS_METHOD);
   __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
   __ CallStub(&stub);
 
@@ -2690,7 +2695,7 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr) {
   __ Move(rdx, Smi::FromInt(expr->CallFeedbackSlot()));
 
   // Record call targets in unoptimized code.
-  CallFunctionStub stub(arg_count, RECORD_CALL_TARGET);
+  CallFunctionStub stub(isolate(), arg_count, RECORD_CALL_TARGET);
   __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
   __ CallStub(&stub);
   RecordJSReturnSite(expr);
@@ -2762,7 +2767,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
     }
     // Record source position for debugger.
     SetSourcePosition(expr->position());
-    CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS);
+    CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
     __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
     __ CallStub(&stub);
     RecordJSReturnSite(expr);
@@ -2876,7 +2881,7 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
   __ Move(rbx, FeedbackVector());
   __ Move(rdx, Smi::FromInt(expr->CallNewFeedbackSlot()));
 
-  CallConstructStub stub(RECORD_CALL_TARGET);
+  CallConstructStub stub(isolate(), RECORD_CALL_TARGET);
   __ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL);
   PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
   context()->Plug(rax);
@@ -3248,7 +3253,7 @@ void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
   VisitForAccumulatorValue(args->at(0));
   __ movp(rdx, rax);
   __ Move(rax, Smi::FromInt(info_->scope()->num_parameters()));
-  ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
+  ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
   __ CallStub(&stub);
   context()->Plug(rax);
 }
@@ -3361,7 +3366,7 @@ void FullCodeGenerator::EmitLog(CallRuntime* expr) {
 
 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
   // Load the arguments on the stack and call the stub.
-  SubStringStub stub;
+  SubStringStub stub(isolate());
   ZoneList<Expression*>* args = expr->arguments();
   ASSERT(args->length() == 3);
   VisitForStackValue(args->at(0));
@@ -3374,7 +3379,7 @@ void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
 
 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
   // Load the arguments on the stack and call the stub.
-  RegExpExecStub stub;
+  RegExpExecStub stub(isolate());
   ZoneList<Expression*>* args = expr->arguments();
   ASSERT(args->length() == 4);
   VisitForStackValue(args->at(0));
@@ -3524,7 +3529,7 @@ void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
   ASSERT(args->length() == 2);
   VisitForStackValue(args->at(0));
   VisitForStackValue(args->at(1));
-  MathPowStub stub(MathPowStub::ON_STACK);
+  MathPowStub stub(isolate(), MathPowStub::ON_STACK);
   __ CallStub(&stub);
   context()->Plug(rax);
 }
@@ -3565,7 +3570,7 @@ void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
   // Load the argument into rax and call the stub.
   VisitForAccumulatorValue(args->at(0));
 
-  NumberToStringStub stub;
+  NumberToStringStub stub(isolate());
   __ CallStub(&stub);
   context()->Plug(rax);
 }
@@ -3691,7 +3696,7 @@ void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
   VisitForAccumulatorValue(args->at(1));
 
   __ Pop(rdx);
-  StringAddStub stub(STRING_ADD_CHECK_BOTH, NOT_TENURED);
+  StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
   __ CallStub(&stub);
   context()->Plug(rax);
 }
@@ -3704,7 +3709,7 @@ void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
   VisitForStackValue(args->at(0));
   VisitForStackValue(args->at(1));
 
-  StringCompareStub stub;
+  StringCompareStub stub(isolate());
   __ CallStub(&stub);
   context()->Plug(rax);
 }
@@ -3743,7 +3748,7 @@ void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
 
 
 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
-  RegExpConstructResultStub stub;
+  RegExpConstructResultStub stub(isolate());
   ZoneList<Expression*>* args = expr->arguments();
   ASSERT(args->length() == 3);
   VisitForStackValue(args->at(0));
@@ -4169,7 +4174,7 @@ void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
 
     // Record source position of the IC call.
     SetSourcePosition(expr->position());
-    CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS);
+    CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
     __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
     __ CallStub(&stub);
 
@@ -4392,7 +4397,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
     __ bind(&slow);
   }
 
-  ToNumberStub convert_stub;
+  ToNumberStub convert_stub(isolate());
   __ CallStub(&convert_stub);
 
   // Save result for postfix expressions.
@@ -4422,7 +4427,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
   __ bind(&stub_call);
   __ movp(rdx, rax);
   __ Move(rax, Smi::FromInt(1));
-  BinaryOpICStub stub(expr->binary_op(), NO_OVERWRITE);
+  BinaryOpICStub stub(isolate(), expr->binary_op(), NO_OVERWRITE);
   CallIC(stub.GetCode(isolate()), expr->CountBinOpFeedbackId());
   patch_site.EmitPatchInfo();
   __ bind(&done);
@@ -4632,7 +4637,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
 
     case Token::INSTANCEOF: {
       VisitForStackValue(expr->right());
-      InstanceofStub stub(InstanceofStub::kNoFlags);
+      InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
       __ CallStub(&stub);
       PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
       __ testp(rax, rax);
diff --git a/src/x64/lithium-codegen-x64.cc b/src/x64/lithium-codegen-x64.cc
index 0cc8d16aab3210ddc42236c5486d85248d9f7943..c12e0b4324459327897b66d4eae66a0988d32ab2 100644 (file)
--- a/src/x64/lithium-codegen-x64.cc
+++ b/src/x64/lithium-codegen-x64.cc
@@ -219,7 +219,7 @@ bool LCodeGen::GeneratePrologue() {
     Comment(";;; Allocate local context");
     // Argument to NewContext is the function, which is still in rdi.
     if (heap_slots <= FastNewContextStub::kMaximumSlots) {
-      FastNewContextStub stub(heap_slots);
+      FastNewContextStub stub(isolate(), heap_slots);
       __ CallStub(&stub);
     } else {
       __ Push(rdi);
@@ -989,17 +989,17 @@ void LCodeGen::DoCallStub(LCallStub* instr) {
   ASSERT(ToRegister(instr->result()).is(rax));
   switch (instr->hydrogen()->major_key()) {
     case CodeStub::RegExpExec: {
-      RegExpExecStub stub;
+      RegExpExecStub stub(isolate());
       CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
       break;
     }
     case CodeStub::SubString: {
-      SubStringStub stub;
+      SubStringStub stub(isolate());
       CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
       break;
     }
     case CodeStub::StringCompare: {
-      StringCompareStub stub;
+      StringCompareStub stub(isolate());
       CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
       break;
     }
@@ -2029,7 +2029,7 @@ void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
   ASSERT(ToRegister(instr->right()).is(rax));
   ASSERT(ToRegister(instr->result()).is(rax));
 
-  BinaryOpICStub stub(instr->op(), NO_OVERWRITE);
+  BinaryOpICStub stub(isolate(), instr->op(), NO_OVERWRITE);
   CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
 }
 
@@ -2623,7 +2623,7 @@ void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
 
 void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
   ASSERT(ToRegister(instr->context()).is(rsi));
-  InstanceofStub stub(InstanceofStub::kNoFlags);
+  InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
   __ Push(ToRegister(instr->left()));
   __ Push(ToRegister(instr->right()));
   CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
@@ -2709,7 +2709,7 @@ void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
     PushSafepointRegistersScope scope(this);
     InstanceofStub::Flags flags = static_cast<InstanceofStub::Flags>(
         InstanceofStub::kNoFlags | InstanceofStub::kCallSiteInlineCheck);
-    InstanceofStub stub(flags);
+    InstanceofStub stub(isolate(), flags);
 
     __ Push(ToRegister(instr->value()));
     __ Push(instr->function());
@@ -3802,7 +3802,7 @@ void LCodeGen::DoPower(LPower* instr) {
   ASSERT(ToDoubleRegister(instr->result()).is(xmm3));
 
   if (exponent_type.IsSmi()) {
-    MathPowStub stub(MathPowStub::TAGGED);
+    MathPowStub stub(isolate(), MathPowStub::TAGGED);
     __ CallStub(&stub);
   } else if (exponent_type.IsTagged()) {
     Label no_deopt;
@@ -3810,14 +3810,14 @@ void LCodeGen::DoPower(LPower* instr) {
     __ CmpObjectType(exponent, HEAP_NUMBER_TYPE, rcx);
     DeoptimizeIf(not_equal, instr->environment());
     __ bind(&no_deopt);
-    MathPowStub stub(MathPowStub::TAGGED);
+    MathPowStub stub(isolate(), MathPowStub::TAGGED);
     __ CallStub(&stub);
   } else if (exponent_type.IsInteger32()) {
-    MathPowStub stub(MathPowStub::INTEGER);
+    MathPowStub stub(isolate(), MathPowStub::INTEGER);
     __ CallStub(&stub);
   } else {
     ASSERT(exponent_type.IsDouble());
-    MathPowStub stub(MathPowStub::DOUBLE);
+    MathPowStub stub(isolate(), MathPowStub::DOUBLE);
     __ CallStub(&stub);
   }
 }
@@ -3908,7 +3908,7 @@ void LCodeGen::DoCallFunction(LCallFunction* instr) {
   ASSERT(ToRegister(instr->result()).is(rax));
 
   int arity = instr->arity();
-  CallFunctionStub stub(arity, instr->hydrogen()->function_flags());
+  CallFunctionStub stub(isolate(), arity, instr->hydrogen()->function_flags());
   CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
 }
 
@@ -3921,7 +3921,7 @@ void LCodeGen::DoCallNew(LCallNew* instr) {
   __ Set(rax, instr->arity());
   // No cell in ebx for construct type feedback in optimized code
   __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
-  CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
+  CallConstructStub stub(isolate(), NO_CALL_FUNCTION_FLAGS);
   CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
 }
 
@@ -3940,7 +3940,7 @@ void LCodeGen::DoCallNewArray(LCallNewArray* instr) {
           : DONT_OVERRIDE;
 
   if (instr->arity() == 0) {
-    ArrayNoArgumentConstructorStub stub(kind, override_mode);
+    ArrayNoArgumentConstructorStub stub(isolate(), kind, override_mode);
     CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
   } else if (instr->arity() == 1) {
     Label done;
@@ -3953,17 +3953,19 @@ void LCodeGen::DoCallNewArray(LCallNewArray* instr) {
       __ j(zero, &packed_case, Label::kNear);
 
       ElementsKind holey_kind = GetHoleyElementsKind(kind);
-      ArraySingleArgumentConstructorStub stub(holey_kind, override_mode);
+      ArraySingleArgumentConstructorStub stub(isolate(),
+                                              holey_kind,
+                                              override_mode);
       CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
       __ jmp(&done, Label::kNear);
       __ bind(&packed_case);
     }
 
-    ArraySingleArgumentConstructorStub stub(kind, override_mode);
+    ArraySingleArgumentConstructorStub stub(isolate(), kind, override_mode);
     CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
     __ bind(&done);
   } else {
-    ArrayNArgumentsConstructorStub stub(kind, override_mode);
+    ArrayNArgumentsConstructorStub stub(isolate(), kind, override_mode);
     CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
   }
 }
@@ -4411,7 +4413,7 @@ void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
     PushSafepointRegistersScope scope(this);
     __ Move(rbx, to_map);
     bool is_js_array = from_map->instance_type() == JS_ARRAY_TYPE;
-    TransitionElementsKindStub stub(from_kind, to_kind, is_js_array);
+    TransitionElementsKindStub stub(isolate(), from_kind, to_kind, is_js_array);
     __ CallStub(&stub);
     RecordSafepointWithLazyDeopt(instr, RECORD_SAFEPOINT_WITH_REGISTERS, 0);
   }
@@ -4433,7 +4435,8 @@ void LCodeGen::DoStringAdd(LStringAdd* instr) {
   ASSERT(ToRegister(instr->context()).is(rsi));
   ASSERT(ToRegister(instr->left()).is(rdx));
   ASSERT(ToRegister(instr->right()).is(rax));
-  StringAddStub stub(instr->hydrogen()->flags(),
+  StringAddStub stub(isolate(),
+                     instr->hydrogen()->flags(),
                      instr->hydrogen()->pretenure_flag());
   CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
 }
@@ -5314,7 +5317,8 @@ void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
   // space for nested functions that don't need literals cloning.
   bool pretenure = instr->hydrogen()->pretenure();
   if (!pretenure && instr->hydrogen()->has_no_literals()) {
-    FastNewClosureStub stub(instr->hydrogen()->strict_mode(),
+    FastNewClosureStub stub(isolate(),
+                            instr->hydrogen()->strict_mode(),
                             instr->hydrogen()->is_generator());
     __ Move(rbx, instr->hydrogen()->shared_info());
     CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
diff --git a/src/x64/macro-assembler-x64.cc b/src/x64/macro-assembler-x64.cc
index 1fd7c2c94624a1a7e0b8b5426dbbf18c44d146ad..e59f6d579769803ea8b55b53cad5c17716562d0e 100644 (file)
--- a/src/x64/macro-assembler-x64.cc
+++ b/src/x64/macro-assembler-x64.cc
@@ -259,7 +259,7 @@ void MacroAssembler::RememberedSetHelper(Register object,  // For debug tests.
     j(equal, &done, Label::kNear);
   }
   StoreBufferOverflowStub store_buffer_overflow =
-      StoreBufferOverflowStub(save_fp);
+      StoreBufferOverflowStub(isolate(), save_fp);
   CallStub(&store_buffer_overflow);
   if (and_then == kReturnAtEnd) {
     ret(0);
@@ -437,7 +437,8 @@ void MacroAssembler::RecordWrite(Register object,
                 &done,
                 Label::kNear);
 
-  RecordWriteStub stub(object, value, address, remembered_set_action, fp_mode);
+  RecordWriteStub stub(isolate(), object, value, address, remembered_set_action,
+                       fp_mode);
   CallStub(&stub);
 
   bind(&done);
@@ -608,7 +609,7 @@ void MacroAssembler::CallRuntime(const Runtime::Function* f,
   // smarter.
   Set(rax, num_arguments);
   LoadAddress(rbx, ExternalReference(f, isolate()));
-  CEntryStub ces(f->result_size, save_doubles);
+  CEntryStub ces(isolate(), f->result_size, save_doubles);
   CallStub(&ces);
 }
 
@@ -618,7 +619,7 @@ void MacroAssembler::CallExternalReference(const ExternalReference& ext,
   Set(rax, num_arguments);
   LoadAddress(rbx, ext);
 
-  CEntryStub stub(1);
+  CEntryStub stub(isolate(), 1);
   CallStub(&stub);
 }
 
@@ -827,7 +828,7 @@ void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
                                              int result_size) {
   // Set the entry point and jump to the C entry runtime stub.
   LoadAddress(rbx, ext);
-  CEntryStub ces(result_size);
+  CEntryStub ces(isolate(), result_size);
   jmp(ces.GetCode(isolate()), RelocInfo::CODE_TARGET);
 }
 
@@ -3302,7 +3303,7 @@ void MacroAssembler::LoadUint32(XMMRegister dst,
 void MacroAssembler::SlowTruncateToI(Register result_reg,
                                      Register input_reg,
                                      int offset) {
-  DoubleToIStub stub(input_reg, result_reg, offset, true);
+  DoubleToIStub stub(isolate(), input_reg, result_reg, offset, true);
   call(stub.GetCode(isolate()), RelocInfo::CODE_TARGET);
 }
 
@@ -3695,7 +3696,7 @@ void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
 void MacroAssembler::DebugBreak() {
   Set(rax, 0);  // No arguments.
   LoadAddress(rbx, ExternalReference(Runtime::kDebugBreak, isolate()));
-  CEntryStub ces(1);
+  CEntryStub ces(isolate(), 1);
   ASSERT(AllowThisStubCall(&ces));
   Call(ces.GetCode(isolate()), RelocInfo::DEBUG_BREAK);
 }
diff --git a/src/x64/stub-cache-x64.cc b/src/x64/stub-cache-x64.cc
index 93121a05ca89e2d43b8f9ca218fc4756e954b626..2b9f9746066be86005222ebd93b1cc8577edefc6 100644 (file)
--- a/src/x64/stub-cache-x64.cc
+++ b/src/x64/stub-cache-x64.cc
@@ -420,7 +420,7 @@ void StubCompiler::GenerateFastApiCall(MacroAssembler* masm,
       api_function_address, function_address, RelocInfo::EXTERNAL_REFERENCE);
 
   // Jump to stub.
-  CallApiFunctionStub stub(is_store, call_data_undefined, argc);
+  CallApiFunctionStub stub(isolate, is_store, call_data_undefined, argc);
   __ TailCallStub(&stub);
 }
 
@@ -940,12 +940,14 @@ void LoadStubCompiler::GenerateLoadField(Register reg,
                                          Representation representation) {
   if (!reg.is(receiver())) __ movp(receiver(), reg);
   if (kind() == Code::LOAD_IC) {
-    LoadFieldStub stub(field.is_inobject(holder),
+    LoadFieldStub stub(isolate(),
+                       field.is_inobject(holder),
                        field.translate(holder),
                        representation);
     GenerateTailCall(masm(), stub.GetCode(isolate()));
   } else {
-    KeyedLoadFieldStub stub(field.is_inobject(holder),
+    KeyedLoadFieldStub stub(isolate(),
+                            field.is_inobject(holder),
                             field.translate(holder),
                             representation);
     GenerateTailCall(masm(), stub.GetCode(isolate()));
@@ -993,7 +995,7 @@ void LoadStubCompiler::GenerateLoadCallback(
   Address getter_address = v8::ToCData<Address>(callback->getter());
   __ Move(api_function_address, getter_address, RelocInfo::EXTERNAL_REFERENCE);
 
-  CallApiGetterStub stub;
+  CallApiGetterStub stub(isolate());
   __ TailCallStub(&stub);
 }
 
diff --git a/test/cctest/test-code-stubs-arm.cc b/test/cctest/test-code-stubs-arm.cc
index 53cdd161308b903ea92bdf6a25b03c3cfdaaee49..7f5d8645ba6aff40359f42fd61de5623b3b264de 100644 (file)
--- a/test/cctest/test-code-stubs-arm.cc
+++ b/test/cctest/test-code-stubs-arm.cc
@@ -53,7 +53,8 @@ ConvertDToIFunc MakeConvertDToIFuncTrampoline(Isolate* isolate,
   CHECK(buffer);
   HandleScope handles(isolate);
   MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size));
-  DoubleToIStub stub(source_reg, destination_reg, 0, true, inline_fastpath);
+  DoubleToIStub stub(isolate, source_reg, destination_reg, 0, true,
+                     inline_fastpath);
 
   byte* start = stub.GetCode(isolate)->instruction_start();
   Label done;
diff --git a/test/cctest/test-code-stubs-arm64.cc b/test/cctest/test-code-stubs-arm64.cc
index 7ddefdde14e604e63cfdfc164b47ec50241979d9..0bcb6b7c61a0f7c902a7bda17e3c6faa097ab6e2 100644 (file)
--- a/test/cctest/test-code-stubs-arm64.cc
+++ b/test/cctest/test-code-stubs-arm64.cc
@@ -53,7 +53,8 @@ ConvertDToIFunc MakeConvertDToIFuncTrampoline(Isolate* isolate,
   CHECK(buffer);
   HandleScope handles(isolate);
   MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size));
-  DoubleToIStub stub(source_reg, destination_reg, 0, true, inline_fastpath);
+  DoubleToIStub stub(isolate, source_reg, destination_reg, 0, true,
+                     inline_fastpath);
 
   byte* start = stub.GetCode(isolate)->instruction_start();
   Label done;
diff --git a/test/cctest/test-code-stubs-ia32.cc b/test/cctest/test-code-stubs-ia32.cc
index c206a0102a9b0c451f44ce97f745a0b002528d05..e07f21b82e5da5532b137eb52e8b7d8930bdb9ee 100644 (file)
--- a/test/cctest/test-code-stubs-ia32.cc
+++ b/test/cctest/test-code-stubs-ia32.cc
@@ -55,7 +55,7 @@ ConvertDToIFunc MakeConvertDToIFuncTrampoline(Isolate* isolate,
   MacroAssembler assm(isolate, buffer, static_cast<int>(actual_size));
   int offset =
     source_reg.is(esp) ? 0 : (HeapNumber::kValueOffset - kSmiTagSize);
-  DoubleToIStub stub(source_reg, destination_reg, offset, true);
+  DoubleToIStub stub(isolate, source_reg, destination_reg, offset, true);
   byte* start = stub.GetCode(isolate)->instruction_start();
 
   __ push(ebx);
diff --git a/test/cctest/test-code-stubs-x64.cc b/test/cctest/test-code-stubs-x64.cc
index 348b21aca5958612a1a083db98ed88bdd61e8ce0..a63209ffa3125b28e41f7199a40e7078ce36cbd8 100644 (file)
--- a/test/cctest/test-code-stubs-x64.cc
+++ b/test/cctest/test-code-stubs-x64.cc
@@ -54,7 +54,7 @@ ConvertDToIFunc MakeConvertDToIFuncTrampoline(Isolate* isolate,
   MacroAssembler assm(isolate, buffer, static_cast<int>(actual_size));
   int offset =
     source_reg.is(rsp) ? 0 : (HeapNumber::kValueOffset - kSmiTagSize);
-  DoubleToIStub stub(source_reg, destination_reg, offset, true);
+  DoubleToIStub stub(isolate, source_reg, destination_reg, offset, true);
   byte* start = stub.GetCode(isolate)->instruction_start();
 
   __ pushq(rbx);