if (is_construct) {
// No type feedback cell is available
__ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
- CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
+ CallConstructStub stub(masm->isolate(), NO_CALL_FUNCTION_FLAGS);
__ CallStub(&stub);
} else {
ParameterCount actual(a0);
void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) {
// Update the static counter each time a new code stub is generated.
- Isolate* isolate = masm->isolate();
- isolate->counters()->code_stubs()->Increment();
+ isolate()->counters()->code_stubs()->Increment();
- CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate);
+ CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate());
int param_count = descriptor->register_param_count_;
{
// Call the runtime system in a fresh internal frame.
// stub so you don't have to set up the frame.
class ConvertToDoubleStub : public PlatformCodeStub {
public:
- ConvertToDoubleStub(Register result_reg_1,
+ ConvertToDoubleStub(Isolate* isolate,
+ Register result_reg_1,
Register result_reg_2,
Register source_reg,
Register scratch_reg)
- : result1_(result_reg_1),
+ : PlatformCodeStub(isolate),
+ result1_(result_reg_1),
result2_(result_reg_2),
source_(source_reg),
zeros_(scratch_reg) { }
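// For orientation only (illustrative sketch, not lines from the patched
// files): the pattern applied throughout this change is that each stub
// constructor gains a leading Isolate* and forwards it to the
// PlatformCodeStub base, which makes it available through an isolate()
// accessor. ExampleStub below is hypothetical:
//
//   class ExampleStub : public PlatformCodeStub {
//    public:
//     ExampleStub(Isolate* isolate, Register reg)
//         : PlatformCodeStub(isolate), reg_(reg) {}
//     void Generate(MacroAssembler* masm);  // may now call isolate() directly
//    private:
//     Register reg_;
//   };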
void WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime(
Isolate* isolate) {
- WriteInt32ToHeapNumberStub stub1(a1, v0, a2, a3);
- WriteInt32ToHeapNumberStub stub2(a2, v0, a3, a0);
+ WriteInt32ToHeapNumberStub stub1(isolate, a1, v0, a2, a3);
+ WriteInt32ToHeapNumberStub stub2(isolate, a2, v0, a3, a0);
stub1.GetCode(isolate);
stub2.GetCode(isolate);
}
// f12, f14 are the double representations of the left hand side
// and the right hand side if we have FPU. Otherwise a2, a3 represent
// left hand side and a0, a1 represent right hand side.
-
- Isolate* isolate = masm->isolate();
Label nan;
__ li(t0, Operand(LESS));
__ li(t1, Operand(GREATER));
__ JumpIfNonSmisNotBothSequentialAsciiStrings(lhs, rhs, a2, a3, &slow);
- __ IncrementCounter(isolate->counters()->string_compare_native(), 1, a2, a3);
+ __ IncrementCounter(isolate()->counters()->string_compare_native(), 1, a2,
+ a3);
if (cc == eq) {
StringCompareStub::GenerateFlatAsciiStringEquals(masm,
lhs,
AllowExternalCallThatCantCauseGC scope(masm);
__ PrepareCallCFunction(argument_count, fp_argument_count, scratch);
- __ li(a0, Operand(ExternalReference::isolate_address(masm->isolate())));
+ __ li(a0, Operand(ExternalReference::isolate_address(isolate())));
__ CallCFunction(
- ExternalReference::store_buffer_overflow_function(masm->isolate()),
+ ExternalReference::store_buffer_overflow_function(isolate()),
argument_count);
if (save_doubles_ == kSaveFPRegs) {
__ MultiPopFPU(kCallerSavedFPU);
__ PrepareCallCFunction(0, 2, scratch2);
__ MovToFloatParameters(double_base, double_exponent);
__ CallCFunction(
- ExternalReference::power_double_double_function(masm->isolate()),
+ ExternalReference::power_double_double_function(isolate()),
0, 2);
}
__ pop(ra);
__ cvt_d_w(double_exponent, single_scratch);
// Returning or bailing out.
- Counters* counters = masm->isolate()->counters();
+ Counters* counters = isolate()->counters();
if (exponent_type_ == ON_STACK) {
// The arguments are still on the stack.
__ bind(&call_runtime);
__ PrepareCallCFunction(0, 2, scratch);
__ MovToFloatParameters(double_base, double_exponent);
__ CallCFunction(
- ExternalReference::power_double_double_function(masm->isolate()),
+ ExternalReference::power_double_double_function(isolate()),
0, 2);
}
__ pop(ra);
void StoreRegistersStateStub::GenerateAheadOfTime(
Isolate* isolate) {
- StoreRegistersStateStub stub1(kDontSaveFPRegs);
+ StoreRegistersStateStub stub1(isolate, kDontSaveFPRegs);
stub1.GetCode(isolate);
// Hydrogen code stubs need stub2 at snapshot time.
- StoreRegistersStateStub stub2(kSaveFPRegs);
+ StoreRegistersStateStub stub2(isolate, kSaveFPRegs);
stub2.GetCode(isolate);
}
void RestoreRegistersStateStub::GenerateAheadOfTime(
Isolate* isolate) {
- RestoreRegistersStateStub stub1(kDontSaveFPRegs);
+ RestoreRegistersStateStub stub1(isolate, kDontSaveFPRegs);
stub1.GetCode(isolate);
// Hydrogen code stubs need stub2 at snapshot time.
- RestoreRegistersStateStub stub2(kSaveFPRegs);
+ RestoreRegistersStateStub stub2(isolate, kSaveFPRegs);
stub2.GetCode(isolate);
}
void CodeStub::GenerateFPStubs(Isolate* isolate) {
SaveFPRegsMode mode = kSaveFPRegs;
- CEntryStub save_doubles(1, mode);
- StoreBufferOverflowStub stub(mode);
+ CEntryStub save_doubles(isolate, 1, mode);
+ StoreBufferOverflowStub stub(isolate, mode);
// These stubs might already be in the snapshot; detect that and don't
// regenerate them, since regenerating would leave the code stub
// initialization state inconsistent.
void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
- CEntryStub stub(1, kDontSaveFPRegs);
+ CEntryStub stub(isolate, 1, kDontSaveFPRegs);
stub.GetCode(isolate);
}
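// Not part of the patched sources, just a summary of where the Isolate* now
// comes from in the hunks above and below:
//  - inside a stub's own member code (Generate() etc.), the stored isolate
//    via the new isolate() accessor, e.g. isolate()->counters() earlier;
//  - in the static GenerateAheadOfTime()/GenerateFPStubs() helpers, the
//    explicit Isolate* parameter, since no stub instance or MacroAssembler
//    exists there;
//  - in helpers and builtins that are not stub members and only receive a
//    MacroAssembler, still masm->isolate(), as in the CallConstructStub
//    construction at the top of this diff.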
// s1: pointer to first argument (C callee-saved)
// s2: pointer to builtin function (C callee-saved)
- Isolate* isolate = masm->isolate();
-
// Prepare arguments for C routine.
// a0 = argc
__ mov(a0, s0);
__ AssertStackIsAligned();
- __ li(a2, Operand(ExternalReference::isolate_address(isolate)));
+ __ li(a2, Operand(ExternalReference::isolate_address(isolate())));
// To let the GC traverse the return address of the exit frames, we need to
// know where the return address is. The CEntryStub is unmovable, so
__ Branch(&exception_returned, eq, t0, Operand(v0));
ExternalReference pending_exception_address(
- Isolate::kPendingExceptionAddress, isolate);
+ Isolate::kPendingExceptionAddress, isolate());
// Check that there is no pending exception, otherwise we
// should have returned the exception sentinel.
__ lw(v0, MemOperand(a2));
// Clear the pending exception.
- __ li(a3, Operand(isolate->factory()->the_hole_value()));
+ __ li(a3, Operand(isolate()->factory()->the_hole_value()));
__ sw(a3, MemOperand(a2));
// Special handling of termination exceptions which are uncatchable
__ Branch(&object_not_null,
ne,
scratch,
- Operand(masm->isolate()->factory()->null_value()));
+ Operand(isolate()->factory()->null_value()));
__ li(v0, Operand(Smi::FromInt(1)));
__ DropAndRet(HasArgsInRegisters() ? 0 : 2);
// -- a1 : receiver
// -----------------------------------
__ Branch(&miss, ne, a0,
- Operand(masm->isolate()->factory()->prototype_string()));
+ Operand(isolate()->factory()->prototype_string()));
receiver = a1;
} else {
ASSERT(kind() == Code::LOAD_IC);
const int kSubjectOffset = 2 * kPointerSize;
const int kJSRegExpOffset = 3 * kPointerSize;
- Isolate* isolate = masm->isolate();
-
Label runtime;
// Allocation of registers for this function. These are in callee save
// registers and will be preserved by the call to the native RegExp code, as
// Ensure that a RegExp stack is allocated.
ExternalReference address_of_regexp_stack_memory_address =
ExternalReference::address_of_regexp_stack_memory_address(
- isolate);
+ isolate());
ExternalReference address_of_regexp_stack_memory_size =
- ExternalReference::address_of_regexp_stack_memory_size(isolate);
+ ExternalReference::address_of_regexp_stack_memory_size(isolate());
__ li(a0, Operand(address_of_regexp_stack_memory_size));
__ lw(a0, MemOperand(a0, 0));
__ Branch(&runtime, eq, a0, Operand(zero_reg));
// subject: Subject string
// regexp_data: RegExp data (FixedArray)
// All checks done. Now push arguments for native regexp code.
- __ IncrementCounter(isolate->counters()->regexp_entry_native(),
+ __ IncrementCounter(isolate()->counters()->regexp_entry_native(),
1, a0, a2);
// Isolates: note we add an additional parameter here (isolate pointer).
// Argument 9: Pass current isolate address.
// CFunctionArgumentOperand handles MIPS stack argument slots.
- __ li(a0, Operand(ExternalReference::isolate_address(isolate)));
+ __ li(a0, Operand(ExternalReference::isolate_address(isolate())));
__ sw(a0, MemOperand(sp, 5 * kPointerSize));
// Argument 8: Indicate that this is a direct call from JavaScript.
// Argument 5: static offsets vector buffer.
__ li(a0, Operand(
- ExternalReference::address_of_static_offsets_vector(isolate)));
+ ExternalReference::address_of_static_offsets_vector(isolate())));
__ sw(a0, MemOperand(sp, 1 * kPointerSize));
// For arguments 4 and 3 get string length, calculate start of string data
// Locate the code entry and call it.
__ Addu(t9, t9, Operand(Code::kHeaderSize - kHeapObjectTag));
- DirectCEntryStub stub;
+ DirectCEntryStub stub(isolate());
stub.GenerateCall(masm, t9);
__ LeaveExitFrame(false, no_reg, true);
// stack overflow (on the backtrack stack) was detected in RegExp code, but
// the exception has not been created yet. Handle that in the runtime system.
// TODO(592): Rerunning the RegExp to get the stack overflow exception.
- __ li(a1, Operand(isolate->factory()->the_hole_value()));
+ __ li(a1, Operand(isolate()->factory()->the_hole_value()));
__ li(a2, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
- isolate)));
+ isolate())));
__ lw(v0, MemOperand(a2, 0));
__ Branch(&runtime, eq, v0, Operand(a1));
__ bind(&failure);
// For failure and exception return null.
- __ li(v0, Operand(isolate->factory()->null_value()));
+ __ li(v0, Operand(isolate()->factory()->null_value()));
__ DropAndRet(4);
// Process the result from the native regexp code.
// Get the static offsets vector filled by the native regexp code.
ExternalReference address_of_static_offsets_vector =
- ExternalReference::address_of_static_offsets_vector(isolate);
+ ExternalReference::address_of_static_offsets_vector(isolate());
__ li(a2, Operand(address_of_static_offsets_vector));
// a1: number of capture registers
__ SmiTag(a0);
__ MultiPush(kSavedRegs);
- CreateAllocationSiteStub create_stub;
+ CreateAllocationSiteStub create_stub(masm->isolate());
__ CallStub(&create_stub);
__ MultiPop(kSavedRegs);
// If there is a call target cache, mark it megamorphic in the
// non-function case. MegamorphicSentinel is an immortal immovable
// object (megamorphic symbol) so no write barrier is needed.
- ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()),
- masm->isolate()->heap()->megamorphic_symbol());
+ ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(isolate()),
+ isolate()->heap()->megamorphic_symbol());
__ sll(t1, a3, kPointerSizeLog2 - kSmiTagSize);
__ Addu(t1, a2, Operand(t1));
__ LoadRoot(at, Heap::kMegamorphicSymbolRootIndex);
__ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY);
{
Handle<Code> adaptor =
- masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
+ isolate()->builtins()->ArgumentsAdaptorTrampoline();
__ Jump(adaptor, RelocInfo::CODE_TARGET);
}
__ li(a0, Operand(argc_)); // Set up the number of arguments.
__ li(a2, Operand(0, RelocInfo::NONE32));
__ GetBuiltinFunction(a1, Builtins::CALL_NON_FUNCTION);
- __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
+ __ Jump(isolate()->builtins()->ArgumentsAdaptorTrampoline(),
RelocInfo::CODE_TARGET);
}
__ bind(&do_call);
// Set expected number of arguments to zero (not changing a0).
__ li(a2, Operand(0, RelocInfo::NONE32));
- __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
+ __ Jump(isolate()->builtins()->ArgumentsAdaptorTrampoline(),
RelocInfo::CODE_TARGET);
}
masm, a1, t1, a2, a3, t0, t2, t3, t4, DEST_ALWAYS_ALIGNED);
__ bind(&return_v0);
- Counters* counters = masm->isolate()->counters();
+ Counters* counters = isolate()->counters();
__ IncrementCounter(counters->sub_string_native(), 1, a3, t0);
__ DropAndRet(3);
void StringCompareStub::Generate(MacroAssembler* masm) {
Label runtime;
- Counters* counters = masm->isolate()->counters();
+ Counters* counters = isolate()->counters();
// Stack frame on entry.
// sp[0]: right string
// -- a0 : right
// -- ra : return address
// -----------------------------------
- Isolate* isolate = masm->isolate();
// Load a2 with the allocation site. We stick an undefined dummy value here
// and replace it with the real allocation site later when we instantiate this
// stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
- __ li(a2, handle(isolate->heap()->undefined_value()));
+ __ li(a2, handle(isolate()->heap()->undefined_value()));
// Make sure that we actually patched the allocation site.
if (FLAG_debug_code) {
// Tail call into the stub that handles binary operations with allocation
// sites.
- BinaryOpWithAllocationSiteStub stub(state_);
+ BinaryOpWithAllocationSiteStub stub(isolate(), state_);
__ TailCallStub(&stub);
}
__ bind(&unordered);
__ bind(&generic_stub);
- ICCompareStub stub(op_, CompareIC::GENERIC, CompareIC::GENERIC,
+ ICCompareStub stub(isolate(), op_, CompareIC::GENERIC, CompareIC::GENERIC,
CompareIC::GENERIC);
- __ Jump(stub.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
+ __ Jump(stub.GetCode(isolate()), RelocInfo::CODE_TARGET);
__ bind(&maybe_undefined1);
if (Token::IsOrderedRelationalCompareOp(op_)) {
{
// Call the runtime system in a fresh internal frame.
ExternalReference miss =
- ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate());
+ ExternalReference(IC_Utility(IC::kCompareIC_Miss), isolate());
FrameScope scope(masm, StackFrame::INTERNAL);
__ Push(a1, a0);
__ Push(ra, a1, a0);
void DirectCEntryStub::GenerateCall(MacroAssembler* masm,
Register target) {
intptr_t loc =
- reinterpret_cast<intptr_t>(GetCode(masm->isolate()).location());
+ reinterpret_cast<intptr_t>(GetCode(isolate()).location());
__ Move(t9, target);
__ li(ra, Operand(loc, RelocInfo::CODE_TARGET), CONSTANT_SIZE);
__ Call(ra);
__ MultiPush(spill_mask);
__ lw(a0, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
__ li(a1, Operand(Handle<Name>(name)));
- NameDictionaryLookupStub stub(NEGATIVE_LOOKUP);
+ NameDictionaryLookupStub stub(masm->isolate(), NEGATIVE_LOOKUP);
__ CallStub(&stub);
__ mov(at, v0);
__ MultiPop(spill_mask);
__ Move(a0, elements);
__ Move(a1, name);
}
- NameDictionaryLookupStub stub(POSITIVE_LOOKUP);
+ NameDictionaryLookupStub stub(masm->isolate(), POSITIVE_LOOKUP);
__ CallStub(&stub);
__ mov(scratch2, a2);
__ mov(at, v0);
void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
Isolate* isolate) {
- StoreBufferOverflowStub stub1(kDontSaveFPRegs);
+ StoreBufferOverflowStub stub1(isolate, kDontSaveFPRegs);
stub1.GetCode(isolate);
// Hydrogen code stubs need stub2 at snapshot time.
- StoreBufferOverflowStub stub2(kSaveFPRegs);
+ StoreBufferOverflowStub stub2(isolate, kSaveFPRegs);
stub2.GetCode(isolate);
}
__ Move(address, regs_.address());
__ Move(a0, regs_.object());
__ Move(a1, address);
- __ li(a2, Operand(ExternalReference::isolate_address(masm->isolate())));
+ __ li(a2, Operand(ExternalReference::isolate_address(isolate())));
AllowExternalCallThatCantCauseGC scope(masm);
__ CallCFunction(
- ExternalReference::incremental_marking_record_write_function(
- masm->isolate()),
+ ExternalReference::incremental_marking_record_write_function(isolate()),
argument_count);
regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_);
}
void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
- CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
- __ Call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
+ CEntryStub ces(isolate(), 1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
+ __ Call(ces.GetCode(isolate()), RelocInfo::CODE_TARGET);
int parameter_count_offset =
StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
__ lw(a1, MemOperand(fp, parameter_count_offset));
void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
if (masm->isolate()->function_entry_hook() != NULL) {
- ProfileEntryHookStub stub;
+ ProfileEntryHookStub stub(masm->isolate());
__ push(ra);
__ CallStub(&stub);
__ pop(ra);
__ Subu(sp, sp, kCArgsSlotsSize);
#if defined(V8_HOST_ARCH_MIPS)
int32_t entry_hook =
- reinterpret_cast<int32_t>(masm->isolate()->function_entry_hook());
+ reinterpret_cast<int32_t>(isolate()->function_entry_hook());
__ li(t9, Operand(entry_hook));
#else
// Under the simulator we need to indirect the entry hook through a
// trampoline function at a known address.
// It additionally takes an isolate as a third parameter.
- __ li(a2, Operand(ExternalReference::isolate_address(masm->isolate())));
+ __ li(a2, Operand(ExternalReference::isolate_address(isolate())));
ApiFunction dispatcher(FUNCTION_ADDR(EntryHookTrampoline));
__ li(t9, Operand(ExternalReference(&dispatcher,
ExternalReference::BUILTIN_CALL,
- masm->isolate())));
+ isolate())));
#endif
// Call the C function through t9 to conform to the ABI for PIC.
__ Call(t9);
static void CreateArrayDispatch(MacroAssembler* masm,
AllocationSiteOverrideMode mode) {
if (mode == DISABLE_ALLOCATION_SITES) {
- T stub(GetInitialFastElementsKind(), mode);
+ T stub(masm->isolate(), GetInitialFastElementsKind(), mode);
__ TailCallStub(&stub);
} else if (mode == DONT_OVERRIDE) {
int last_index = GetSequenceIndexFromFastElementsKind(
TERMINAL_FAST_ELEMENTS_KIND);
for (int i = 0; i <= last_index; ++i) {
ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
- T stub(kind);
+ T stub(masm->isolate(), kind);
__ TailCallStub(&stub, eq, a3, Operand(kind));
}
ElementsKind initial = GetInitialFastElementsKind();
ElementsKind holey_initial = GetHoleyElementsKind(initial);
- ArraySingleArgumentConstructorStub stub_holey(holey_initial,
+ ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
+ holey_initial,
DISABLE_ALLOCATION_SITES);
__ TailCallStub(&stub_holey);
__ bind(&normal_sequence);
- ArraySingleArgumentConstructorStub stub(initial,
+ ArraySingleArgumentConstructorStub stub(masm->isolate(),
+ initial,
DISABLE_ALLOCATION_SITES);
__ TailCallStub(&stub);
} else if (mode == DONT_OVERRIDE) {
TERMINAL_FAST_ELEMENTS_KIND);
for (int i = 0; i <= last_index; ++i) {
ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
- ArraySingleArgumentConstructorStub stub(kind);
+ ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
__ TailCallStub(&stub, eq, a3, Operand(kind));
}
TERMINAL_FAST_ELEMENTS_KIND);
for (int i = 0; i <= to_index; ++i) {
ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
- T stub(kind);
+ T stub(isolate, kind);
stub.GetCode(isolate);
if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) {
- T stub1(kind, DISABLE_ALLOCATION_SITES);
+ T stub1(isolate, kind, DISABLE_ALLOCATION_SITES);
stub1.GetCode(isolate);
}
}
ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
for (int i = 0; i < 2; i++) {
// For internal arrays we only need a few things.
- InternalArrayNoArgumentConstructorStub stubh1(kinds[i]);
+ InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]);
stubh1.GetCode(isolate);
- InternalArraySingleArgumentConstructorStub stubh2(kinds[i]);
+ InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
stubh2.GetCode(isolate);
- InternalArrayNArgumentsConstructorStub stubh3(kinds[i]);
+ InternalArrayNArgumentsConstructorStub stubh3(isolate, kinds[i]);
stubh3.GetCode(isolate);
}
}
void InternalArrayConstructorStub::GenerateCase(
MacroAssembler* masm, ElementsKind kind) {
- InternalArrayNoArgumentConstructorStub stub0(kind);
+ InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
__ TailCallStub(&stub0, lo, a0, Operand(1));
- InternalArrayNArgumentsConstructorStub stubN(kind);
+ InternalArrayNArgumentsConstructorStub stubN(isolate(), kind);
__ TailCallStub(&stubN, hi, a0, Operand(1));
if (IsFastPackedElementsKind(kind)) {
__ lw(at, MemOperand(sp, 0));
InternalArraySingleArgumentConstructorStub
- stub1_holey(GetHoleyElementsKind(kind));
+ stub1_holey(isolate(), GetHoleyElementsKind(kind));
__ TailCallStub(&stub1_holey, ne, at, Operand(zero_reg));
}
- InternalArraySingleArgumentConstructorStub stub1(kind);
+ InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
__ TailCallStub(&stub1);
}
STATIC_ASSERT(FCA::kHolderIndex == 0);
STATIC_ASSERT(FCA::kArgsLength == 7);
- Isolate* isolate = masm->isolate();
-
// Save context, callee and call data.
__ Push(context, callee, call_data);
// Load context from callee.
// Push return value and default return value.
__ Push(scratch, scratch);
__ li(scratch,
- Operand(ExternalReference::isolate_address(isolate)));
+ Operand(ExternalReference::isolate_address(isolate())));
// Push isolate and holder.
__ Push(scratch, holder);
ExternalReference::Type thunk_type = ExternalReference::PROFILING_API_CALL;
ApiFunction thunk_fun(thunk_address);
ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type,
- masm->isolate());
+ isolate());
AllowExternalCallThatCantCauseGC scope(masm);
MemOperand context_restore_operand(
ExternalReference::PROFILING_GETTER_CALL;
ApiFunction thunk_fun(thunk_address);
ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type,
- masm->isolate());
+ isolate());
__ CallApiFunctionAndReturn(api_function_address,
thunk_ref,
kStackUnwindSpace,
class StoreBufferOverflowStub: public PlatformCodeStub {
public:
- explicit StoreBufferOverflowStub(SaveFPRegsMode save_fp)
- : save_doubles_(save_fp) {}
+ StoreBufferOverflowStub(Isolate* isolate, SaveFPRegsMode save_fp)
+ : PlatformCodeStub(isolate), save_doubles_(save_fp) {}
void Generate(MacroAssembler* masm);
class SubStringStub: public PlatformCodeStub {
public:
- SubStringStub() {}
+ explicit SubStringStub(Isolate* isolate) : PlatformCodeStub(isolate) {}
private:
Major MajorKey() { return SubString; }
class StoreRegistersStateStub: public PlatformCodeStub {
public:
- explicit StoreRegistersStateStub(SaveFPRegsMode with_fp)
- : save_doubles_(with_fp) {}
+ StoreRegistersStateStub(Isolate* isolate, SaveFPRegsMode with_fp)
+ : PlatformCodeStub(isolate), save_doubles_(with_fp) {}
static void GenerateAheadOfTime(Isolate* isolate);
private:
class RestoreRegistersStateStub: public PlatformCodeStub {
public:
- explicit RestoreRegistersStateStub(SaveFPRegsMode with_fp)
- : save_doubles_(with_fp) {}
+ RestoreRegistersStateStub(Isolate* isolate, SaveFPRegsMode with_fp)
+ : PlatformCodeStub(isolate), save_doubles_(with_fp) {}
static void GenerateAheadOfTime(Isolate* isolate);
private:
class StringCompareStub: public PlatformCodeStub {
public:
- StringCompareStub() { }
+ explicit StringCompareStub(Isolate* isolate) : PlatformCodeStub(isolate) { }
// Compares two flat ASCII strings and returns the result in v0.
static void GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
// so you don't have to set up the frame.
class WriteInt32ToHeapNumberStub : public PlatformCodeStub {
public:
- WriteInt32ToHeapNumberStub(Register the_int,
+ WriteInt32ToHeapNumberStub(Isolate* isolate,
+ Register the_int,
Register the_heap_number,
Register scratch,
Register scratch2)
- : the_int_(the_int),
+ : PlatformCodeStub(isolate),
+ the_int_(the_int),
the_heap_number_(the_heap_number),
scratch_(scratch),
sign_(scratch2) {
class RecordWriteStub: public PlatformCodeStub {
public:
- RecordWriteStub(Register object,
+ RecordWriteStub(Isolate* isolate,
+ Register object,
Register value,
Register address,
RememberedSetAction remembered_set_action,
SaveFPRegsMode fp_mode)
- : object_(object),
+ : PlatformCodeStub(isolate),
+ object_(object),
value_(value),
address_(address),
remembered_set_action_(remembered_set_action),
// moved by GC
class DirectCEntryStub: public PlatformCodeStub {
public:
- DirectCEntryStub() {}
+ explicit DirectCEntryStub(Isolate* isolate) : PlatformCodeStub(isolate) {}
void Generate(MacroAssembler* masm);
void GenerateCall(MacroAssembler* masm, Register target);
public:
enum LookupMode { POSITIVE_LOOKUP, NEGATIVE_LOOKUP };
- explicit NameDictionaryLookupStub(LookupMode mode) : mode_(mode) { }
+ NameDictionaryLookupStub(Isolate* isolate, LookupMode mode)
+ : PlatformCodeStub(isolate), mode_(mode) { }
void Generate(MacroAssembler* masm);
__ PrepareCEntryArgs(0); // No arguments.
__ PrepareCEntryFunction(ExternalReference::debug_break(masm->isolate()));
- CEntryStub ceb(1);
+ CEntryStub ceb(masm->isolate(), 1);
__ CallStub(&ceb);
// Restore the register values from the expression stack.
__ Push(info->scope()->GetScopeInfo());
__ CallRuntime(Runtime::kHiddenNewGlobalContext, 2);
} else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
- FastNewContextStub stub(heap_slots);
+ FastNewContextStub stub(isolate(), heap_slots);
__ CallStub(&stub);
} else {
__ push(a1);
} else {
type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
}
- ArgumentsAccessStub stub(type);
+ ArgumentsAccessStub stub(isolate(), type);
__ CallStub(&stub);
SetVar(arguments, v0, a1, a2);
!pretenure &&
scope()->is_function_scope() &&
info->num_literals() == 0) {
- FastNewClosureStub stub(info->strict_mode(), info->is_generator());
+ FastNewClosureStub stub(isolate(),
+ info->strict_mode(),
+ info->is_generator());
__ li(a2, Operand(info));
__ CallStub(&stub);
} else {
__ Push(a3, a2, a1, a0);
__ CallRuntime(Runtime::kHiddenCreateObjectLiteral, 4);
} else {
- FastCloneShallowObjectStub stub(properties_count);
+ FastCloneShallowObjectStub stub(isolate(), properties_count);
__ CallStub(&stub);
}
if (has_fast_elements && constant_elements_values->map() ==
isolate()->heap()->fixed_cow_array_map()) {
FastCloneShallowArrayStub stub(
+ isolate(),
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
allocation_site_mode,
length);
mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
}
- FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
+ FastCloneShallowArrayStub stub(isolate(), mode, allocation_site_mode,
+ length);
__ CallStub(&stub);
}
} else {
__ li(a3, Operand(Smi::FromInt(i)));
__ mov(a0, result_register());
- StoreArrayLiteralElementStub stub;
+ StoreArrayLiteralElementStub stub(isolate());
__ CallStub(&stub);
}
__ mov(a0, v0);
__ mov(a1, a0);
__ sw(a1, MemOperand(sp, 2 * kPointerSize));
- CallFunctionStub stub(1, CALL_AS_METHOD);
+ CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
__ CallStub(&stub);
__ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
patch_site.EmitJumpIfSmi(scratch1, &smi_case);
__ bind(&stub_call);
- BinaryOpICStub stub(op, mode);
+ BinaryOpICStub stub(isolate(), op, mode);
CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
patch_site.EmitPatchInfo();
__ jmp(&done);
OverwriteMode mode) {
__ mov(a0, result_register());
__ pop(a1);
- BinaryOpICStub stub(op, mode);
+ BinaryOpICStub stub(isolate(), op, mode);
JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
patch_site.EmitPatchInfo();
}
// Record source position for debugger.
SetSourcePosition(expr->position());
- CallFunctionStub stub(arg_count, flags);
+ CallFunctionStub stub(isolate(), arg_count, flags);
__ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
__ CallStub(&stub);
// Record source position for debugger.
SetSourcePosition(expr->position());
- CallFunctionStub stub(arg_count, CALL_AS_METHOD);
+ CallFunctionStub stub(isolate(), arg_count, CALL_AS_METHOD);
__ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
__ CallStub(&stub);
__ li(a3, Operand(Smi::FromInt(expr->CallFeedbackSlot())));
// Record call targets in unoptimized code.
- CallFunctionStub stub(arg_count, RECORD_CALL_TARGET);
+ CallFunctionStub stub(isolate(), arg_count, RECORD_CALL_TARGET);
__ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
__ CallStub(&stub);
RecordJSReturnSite(expr);
}
// Record source position for debugger.
SetSourcePosition(expr->position());
- CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS);
+ CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
__ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
__ CallStub(&stub);
RecordJSReturnSite(expr);
__ li(a2, FeedbackVector());
__ li(a3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot())));
- CallConstructStub stub(RECORD_CALL_TARGET);
+ CallConstructStub stub(isolate(), RECORD_CALL_TARGET);
__ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL);
PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
context()->Plug(v0);
VisitForAccumulatorValue(args->at(0));
__ mov(a1, v0);
__ li(a0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
- ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
+ ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
__ CallStub(&stub);
context()->Plug(v0);
}
void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
// Load the arguments on the stack and call the stub.
- SubStringStub stub;
+ SubStringStub stub(isolate());
ZoneList<Expression*>* args = expr->arguments();
ASSERT(args->length() == 3);
VisitForStackValue(args->at(0));
void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
// Load the arguments on the stack and call the stub.
- RegExpExecStub stub;
+ RegExpExecStub stub(isolate());
ZoneList<Expression*>* args = expr->arguments();
ASSERT(args->length() == 4);
VisitForStackValue(args->at(0));
ASSERT(args->length() == 2);
VisitForStackValue(args->at(0));
VisitForStackValue(args->at(1));
- MathPowStub stub(MathPowStub::ON_STACK);
+ MathPowStub stub(isolate(), MathPowStub::ON_STACK);
__ CallStub(&stub);
context()->Plug(v0);
}
VisitForAccumulatorValue(args->at(0));
__ mov(a0, result_register());
- NumberToStringStub stub;
+ NumberToStringStub stub(isolate());
__ CallStub(&stub);
context()->Plug(v0);
}
__ pop(a1);
__ mov(a0, result_register()); // StringAddStub requires args in a0, a1.
- StringAddStub stub(STRING_ADD_CHECK_BOTH, NOT_TENURED);
+ StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
__ CallStub(&stub);
context()->Plug(v0);
}
VisitForStackValue(args->at(0));
VisitForStackValue(args->at(1));
- StringCompareStub stub;
+ StringCompareStub stub(isolate());
__ CallStub(&stub);
context()->Plug(v0);
}
void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
- RegExpConstructResultStub stub;
+ RegExpConstructResultStub stub(isolate());
ZoneList<Expression*>* args = expr->arguments();
ASSERT(args->length() == 3);
VisitForStackValue(args->at(0));
// Record source position of the IC call.
SetSourcePosition(expr->position());
- CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS);
+ CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
__ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
__ CallStub(&stub);
__ jmp(&stub_call);
__ bind(&slow);
}
- ToNumberStub convert_stub;
+ ToNumberStub convert_stub(isolate());
__ CallStub(&convert_stub);
// Save result for postfix expressions.
// Record position before stub call.
SetSourcePosition(expr->position());
- BinaryOpICStub stub(Token::ADD, NO_OVERWRITE);
+ BinaryOpICStub stub(isolate(), Token::ADD, NO_OVERWRITE);
CallIC(stub.GetCode(isolate()), expr->CountBinOpFeedbackId());
patch_site.EmitPatchInfo();
__ bind(&done);
case Token::INSTANCEOF: {
VisitForStackValue(expr->right());
- InstanceofStub stub(InstanceofStub::kNoFlags);
+ InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
__ CallStub(&stub);
PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
// The stub returns 0 for true.
Comment(";;; Allocate local context");
// Argument to NewContext is the function, which is in a1.
if (heap_slots <= FastNewContextStub::kMaximumSlots) {
- FastNewContextStub stub(heap_slots);
+ FastNewContextStub stub(isolate(), heap_slots);
__ CallStub(&stub);
} else {
__ push(a1);
ASSERT(ToRegister(instr->result()).is(v0));
switch (instr->hydrogen()->major_key()) {
case CodeStub::RegExpExec: {
- RegExpExecStub stub;
+ RegExpExecStub stub(isolate());
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
break;
}
case CodeStub::SubString: {
- SubStringStub stub;
+ SubStringStub stub(isolate());
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
break;
}
case CodeStub::StringCompare: {
- StringCompareStub stub;
+ StringCompareStub stub(isolate());
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
break;
}
ASSERT(ToRegister(instr->right()).is(a0));
ASSERT(ToRegister(instr->result()).is(v0));
- BinaryOpICStub stub(instr->op(), NO_OVERWRITE);
+ BinaryOpICStub stub(isolate(), instr->op(), NO_OVERWRITE);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
// Other architectures use a nop here to signal that there is no inlined
// patchable code. MIPS does not need the nop, since our marker
Register result = ToRegister(instr->result());
ASSERT(result.is(v0));
- InstanceofStub stub(InstanceofStub::kArgsInRegisters);
+ InstanceofStub stub(isolate(), InstanceofStub::kArgsInRegisters);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
__ Branch(&true_label, eq, result, Operand(zero_reg));
flags | InstanceofStub::kCallSiteInlineCheck);
flags = static_cast<InstanceofStub::Flags>(
flags | InstanceofStub::kReturnTrueFalseObject);
- InstanceofStub stub(flags);
+ InstanceofStub stub(isolate(), flags);
PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
LoadContextFromDeferred(instr->context());
ASSERT(ToDoubleRegister(instr->result()).is(f0));
if (exponent_type.IsSmi()) {
- MathPowStub stub(MathPowStub::TAGGED);
+ MathPowStub stub(isolate(), MathPowStub::TAGGED);
__ CallStub(&stub);
} else if (exponent_type.IsTagged()) {
Label no_deopt;
__ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
DeoptimizeIf(ne, instr->environment(), t3, Operand(at));
__ bind(&no_deopt);
- MathPowStub stub(MathPowStub::TAGGED);
+ MathPowStub stub(isolate(), MathPowStub::TAGGED);
__ CallStub(&stub);
} else if (exponent_type.IsInteger32()) {
- MathPowStub stub(MathPowStub::INTEGER);
+ MathPowStub stub(isolate(), MathPowStub::INTEGER);
__ CallStub(&stub);
} else {
ASSERT(exponent_type.IsDouble());
- MathPowStub stub(MathPowStub::DOUBLE);
+ MathPowStub stub(isolate(), MathPowStub::DOUBLE);
__ CallStub(&stub);
}
}
ASSERT(ToRegister(instr->result()).is(v0));
int arity = instr->arity();
- CallFunctionStub stub(arity, instr->hydrogen()->function_flags());
+ CallFunctionStub stub(isolate(), arity, instr->hydrogen()->function_flags());
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
}
__ li(a0, Operand(instr->arity()));
// No cell in a2 for construct type feedback in optimized code
__ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
- CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
+ CallConstructStub stub(isolate(), NO_CALL_FUNCTION_FLAGS);
CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
}
: DONT_OVERRIDE;
if (instr->arity() == 0) {
- ArrayNoArgumentConstructorStub stub(kind, override_mode);
+ ArrayNoArgumentConstructorStub stub(isolate(), kind, override_mode);
CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
} else if (instr->arity() == 1) {
Label done;
__ Branch(&packed_case, eq, t1, Operand(zero_reg));
ElementsKind holey_kind = GetHoleyElementsKind(kind);
- ArraySingleArgumentConstructorStub stub(holey_kind, override_mode);
+ ArraySingleArgumentConstructorStub stub(isolate(),
+ holey_kind,
+ override_mode);
CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
__ jmp(&done);
__ bind(&packed_case);
}
- ArraySingleArgumentConstructorStub stub(kind, override_mode);
+ ArraySingleArgumentConstructorStub stub(isolate(), kind, override_mode);
CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
__ bind(&done);
} else {
- ArrayNArgumentsConstructorStub stub(kind, override_mode);
+ ArrayNArgumentsConstructorStub stub(isolate(), kind, override_mode);
CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
}
}
this, Safepoint::kWithRegistersAndDoubles);
__ li(a1, Operand(to_map));
bool is_js_array = from_map->instance_type() == JS_ARRAY_TYPE;
- TransitionElementsKindStub stub(from_kind, to_kind, is_js_array);
+ TransitionElementsKindStub stub(isolate(), from_kind, to_kind, is_js_array);
__ CallStub(&stub);
RecordSafepointWithRegistersAndDoubles(
instr->pointer_map(), 0, Safepoint::kLazyDeopt);
ASSERT(ToRegister(instr->context()).is(cp));
ASSERT(ToRegister(instr->left()).is(a1));
ASSERT(ToRegister(instr->right()).is(a0));
- StringAddStub stub(instr->hydrogen()->flags(),
+ StringAddStub stub(isolate(),
+ instr->hydrogen()->flags(),
instr->hydrogen()->pretenure_flag());
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
}
// space for nested functions that don't need literals cloning.
bool pretenure = instr->hydrogen()->pretenure();
if (!pretenure && instr->hydrogen()->has_no_literals()) {
- FastNewClosureStub stub(instr->hydrogen()->strict_mode(),
+ FastNewClosureStub stub(isolate(),
+ instr->hydrogen()->strict_mode(),
instr->hydrogen()->is_generator());
__ li(a2, Operand(instr->hydrogen()->shared_info()));
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
switch (codegen_->expected_safepoint_kind_) {
case Safepoint::kWithRegisters: {
- StoreRegistersStateStub stub1(kDontSaveFPRegs);
+ StoreRegistersStateStub stub1(codegen_->masm_->isolate(),
+ kDontSaveFPRegs);
codegen_->masm_->push(ra);
codegen_->masm_->CallStub(&stub1);
break;
}
case Safepoint::kWithRegistersAndDoubles: {
- StoreRegistersStateStub stub2(kSaveFPRegs);
+ StoreRegistersStateStub stub2(codegen_->masm_->isolate(),
+ kSaveFPRegs);
codegen_->masm_->push(ra);
codegen_->masm_->CallStub(&stub2);
break;
ASSERT((kind & Safepoint::kWithRegisters) != 0);
switch (kind) {
case Safepoint::kWithRegisters: {
- RestoreRegistersStateStub stub1(kDontSaveFPRegs);
+ RestoreRegistersStateStub stub1(codegen_->masm_->isolate(),
+ kDontSaveFPRegs);
codegen_->masm_->push(ra);
codegen_->masm_->CallStub(&stub1);
break;
}
case Safepoint::kWithRegistersAndDoubles: {
- RestoreRegistersStateStub stub2(kSaveFPRegs);
+ RestoreRegistersStateStub stub2(codegen_->masm_->isolate(),
+ kSaveFPRegs);
codegen_->masm_->push(ra);
codegen_->masm_->CallStub(&stub2);
break;
if (ra_status == kRAHasNotBeenSaved) {
push(ra);
}
- RecordWriteStub stub(object, value, address, remembered_set_action, fp_mode);
+ RecordWriteStub stub(isolate(), object, value, address, remembered_set_action,
+ fp_mode);
CallStub(&stub);
if (ra_status == kRAHasNotBeenSaved) {
pop(ra);
}
push(ra);
StoreBufferOverflowStub store_buffer_overflow =
- StoreBufferOverflowStub(fp_mode);
+ StoreBufferOverflowStub(isolate(), fp_mode);
CallStub(&store_buffer_overflow);
pop(ra);
bind(&done);
Subu(sp, sp, Operand(kDoubleSize)); // Put input on stack.
sdc1(double_input, MemOperand(sp, 0));
- DoubleToIStub stub(sp, result, 0, true, true);
+ DoubleToIStub stub(isolate(), sp, result, 0, true, true);
CallStub(&stub);
Addu(sp, sp, Operand(kDoubleSize));
// If we fell through then inline version didn't succeed - call stub instead.
push(ra);
- DoubleToIStub stub(object,
+ DoubleToIStub stub(isolate(),
+ object,
result,
HeapNumber::kValueOffset - kHeapObjectTag,
true,
void MacroAssembler::DebugBreak() {
PrepareCEntryArgs(0);
PrepareCEntryFunction(ExternalReference(Runtime::kDebugBreak, isolate()));
- CEntryStub ces(1);
+ CEntryStub ces(isolate(), 1);
ASSERT(AllowThisStubCall(&ces));
Call(ces.GetCode(isolate()), RelocInfo::DEBUG_BREAK);
}
// Native call returns to the DirectCEntry stub which redirects to the
// return address pushed on stack (could have moved after GC).
// DirectCEntry stub itself is generated early and never moves.
- DirectCEntryStub stub;
+ DirectCEntryStub stub(isolate());
stub.GenerateCall(this, t9);
if (FLAG_log_timer_events) {
// smarter.
PrepareCEntryArgs(num_arguments);
PrepareCEntryFunction(ExternalReference(f, isolate()));
- CEntryStub stub(1, save_doubles);
+ CEntryStub stub(isolate(), 1, save_doubles);
CallStub(&stub);
}
PrepareCEntryArgs(num_arguments);
PrepareCEntryFunction(ext);
- CEntryStub stub(1);
+ CEntryStub stub(isolate(), 1);
CallStub(&stub, TypeFeedbackId::None(), al, zero_reg, Operand(zero_reg), bd);
}
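// Not part of the patched sources: note that GetCode() still takes the
// Isolate* explicitly, e.g. stub.GetCode(isolate()) and
// Call(ces.GetCode(isolate()), ...) above. This change threads the isolate
// through the stub constructors and switches member code to the isolate()
// accessor; the GetCode(isolate) call sites keep their explicit argument.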
void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin,
BranchDelaySlot bd) {
PrepareCEntryFunction(builtin);
- CEntryStub stub(1);
+ CEntryStub stub(isolate(), 1);
Jump(stub.GetCode(isolate()),
RelocInfo::CODE_TARGET,
al,
ExternalReference stack_guard_check =
ExternalReference::re_check_stack_guard_state(masm_->isolate());
__ li(t9, Operand(stack_guard_check));
- DirectCEntryStub stub;
+ DirectCEntryStub stub(isolate());
stub.GenerateCall(masm_, t9);
// DirectCEntryStub allocated space for the C argument slots so we have to
__ li(api_function_address, Operand(ref));
// Jump to stub.
- CallApiFunctionStub stub(is_store, call_data_undefined, argc);
+ CallApiFunctionStub stub(isolate, is_store, call_data_undefined, argc);
__ TailCallStub(&stub);
}
Representation representation) {
if (!reg.is(receiver())) __ mov(receiver(), reg);
if (kind() == Code::LOAD_IC) {
- LoadFieldStub stub(field.is_inobject(holder),
+ LoadFieldStub stub(isolate(),
+ field.is_inobject(holder),
field.translate(holder),
representation);
GenerateTailCall(masm(), stub.GetCode(isolate()));
} else {
- KeyedLoadFieldStub stub(field.is_inobject(holder),
+ KeyedLoadFieldStub stub(isolate(),
+ field.is_inobject(holder),
field.translate(holder),
representation);
GenerateTailCall(masm(), stub.GetCode(isolate()));
ExternalReference ref = ExternalReference(&fun, type, isolate());
__ li(getter_address_reg, Operand(ref));
- CallApiGetterStub stub;
+ CallApiGetterStub stub(isolate());
__ TailCallStub(&stub);
}
CHECK(buffer);
HandleScope handles(isolate);
MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size));
- DoubleToIStub stub(source_reg, destination_reg, 0, true, inline_fastpath);
+ DoubleToIStub stub(isolate, source_reg, destination_reg, 0, true,
+ inline_fastpath);
byte* start = stub.GetCode(isolate)->instruction_start();
Label done;