if (is_construct) {
// No type feedback cell is available
__ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
- CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
+ CallConstructStub stub(masm->isolate(), NO_CALL_FUNCTION_FLAGS);
__ CallStub(&stub);
} else {
ParameterCount actual(r0);
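// (Pattern of this change, visible in every hunk below: code that constructs
// a stub from outside, like the builtins above, passes masm->isolate()
// explicitly, while code inside a stub's own member functions switches from
// masm->isolate() to the stub's new isolate() accessor.)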
void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) {
// Update the static counter each time a new code stub is generated.
- Isolate* isolate = masm->isolate();
- isolate->counters()->code_stubs()->Increment();
+ isolate()->counters()->code_stubs()->Increment();
- CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate);
+ CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate());
int param_count = descriptor->register_param_count_;
{
// Call the runtime system in a fresh internal frame.
// stub so you don't have to set up the frame.
class ConvertToDoubleStub : public PlatformCodeStub {
public:
- ConvertToDoubleStub(Register result_reg_1,
+ ConvertToDoubleStub(Isolate* isolate,
+ Register result_reg_1,
Register result_reg_2,
Register source_reg,
Register scratch_reg)
- : result1_(result_reg_1),
+ : PlatformCodeStub(isolate),
+ result1_(result_reg_1),
result2_(result_reg_2),
source_(source_reg),
zeros_(scratch_reg) { }
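// A minimal sketch (assumed here, not shown in this diff) of the base-class
// shape the new initializer lists rely on: CodeStub stores the Isolate* and
// exposes it as isolate(), and PlatformCodeStub forwards it up.
//
//   class CodeStub {
//    public:
//     explicit CodeStub(Isolate* isolate) : isolate_(isolate) {}
//     Isolate* isolate() const { return isolate_; }
//    private:
//     Isolate* isolate_;
//   };
//
//   class PlatformCodeStub : public CodeStub {
//    public:
//     explicit PlatformCodeStub(Isolate* isolate) : CodeStub(isolate) {}
//   };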
void WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime(
Isolate* isolate) {
- WriteInt32ToHeapNumberStub stub1(r1, r0, r2);
- WriteInt32ToHeapNumberStub stub2(r2, r0, r3);
+ WriteInt32ToHeapNumberStub stub1(isolate, r1, r0, r2);
+ WriteInt32ToHeapNumberStub stub2(isolate, r2, r0, r3);
stub1.GetCode(isolate);
stub2.GetCode(isolate);
}
__ bind(&both_loaded_as_doubles);
// The arguments have been converted to doubles and stored in d6 and d7, if
// VFP3 is supported, or in r0, r1, r2, and r3.
- Isolate* isolate = masm->isolate();
__ bind(&lhs_not_nan);
Label no_nan;
// ARMv7 VFP3 instructions to implement double precision comparison.
__ JumpIfNonSmisNotBothSequentialAsciiStrings(lhs, rhs, r2, r3, &slow);
- __ IncrementCounter(isolate->counters()->string_compare_native(), 1, r2, r3);
+ __ IncrementCounter(isolate()->counters()->string_compare_native(), 1, r2,
+ r3);
if (cc == eq) {
StringCompareStub::GenerateFlatAsciiStringEquals(masm,
lhs,
AllowExternalCallThatCantCauseGC scope(masm);
__ PrepareCallCFunction(argument_count, fp_argument_count, scratch);
- __ mov(r0, Operand(ExternalReference::isolate_address(masm->isolate())));
+ __ mov(r0, Operand(ExternalReference::isolate_address(isolate())));
__ CallCFunction(
- ExternalReference::store_buffer_overflow_function(masm->isolate()),
+ ExternalReference::store_buffer_overflow_function(isolate()),
argument_count);
if (save_doubles_ == kSaveFPRegs) {
__ RestoreFPRegs(sp, scratch);
__ PrepareCallCFunction(0, 2, scratch);
__ MovToFloatParameters(double_base, double_exponent);
__ CallCFunction(
- ExternalReference::power_double_double_function(masm->isolate()),
+ ExternalReference::power_double_double_function(isolate()),
0, 2);
}
__ pop(lr);
__ vcvt_f64_s32(double_exponent, single_scratch);
// Returning or bailing out.
- Counters* counters = masm->isolate()->counters();
+ Counters* counters = isolate()->counters();
if (exponent_type_ == ON_STACK) {
// The arguments are still on the stack.
__ bind(&call_runtime);
__ PrepareCallCFunction(0, 2, scratch);
__ MovToFloatParameters(double_base, double_exponent);
__ CallCFunction(
- ExternalReference::power_double_double_function(masm->isolate()),
+ ExternalReference::power_double_double_function(isolate()),
0, 2);
}
__ pop(lr);
void CodeStub::GenerateFPStubs(Isolate* isolate) {
SaveFPRegsMode mode = kSaveFPRegs;
- CEntryStub save_doubles(1, mode);
- StoreBufferOverflowStub stub(mode);
+ CEntryStub save_doubles(isolate, 1, mode);
+ StoreBufferOverflowStub stub(isolate, mode);
// These stubs might already be in the snapshot; detect that and don't
// regenerate them, since regenerating would leave the code stub
// initialization state inconsistent.
void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
- CEntryStub stub(1, kDontSaveFPRegs);
+ CEntryStub stub(isolate, 1, kDontSaveFPRegs);
stub.GetCode(isolate);
}
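// Note that GetCode(isolate) still takes the isolate explicitly at this
// stage even though the stub now stores one; the two are assumed to be the
// same isolate.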
// Result returned in r0 or r0+r1 by default.
- Isolate* isolate = masm->isolate();
-
#if V8_HOST_ARCH_ARM
int frame_alignment = MacroAssembler::ActivationFrameAlignment();
int frame_alignment_mask = frame_alignment - 1;
// Call C built-in.
// r0 = argc, r1 = argv
- __ mov(r2, Operand(ExternalReference::isolate_address(isolate)));
+ __ mov(r2, Operand(ExternalReference::isolate_address(isolate())));
// To let the GC traverse the return address of the exit frames, we need to
// know where the return address is. The CEntryStub is unmovable, so
__ b(eq, &exception_returned);
ExternalReference pending_exception_address(
- Isolate::kPendingExceptionAddress, isolate);
+ Isolate::kPendingExceptionAddress, isolate());
// Check that there is no pending exception, otherwise we
// should have returned the exception sentinel.
// r2: receiver
// r3: argc
// r4: argv
- Isolate* isolate = masm->isolate();
int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
if (FLAG_enable_ool_constant_pool) {
- __ mov(r8, Operand(isolate->factory()->empty_constant_pool_array()));
+ __ mov(r8, Operand(isolate()->factory()->empty_constant_pool_array()));
}
__ mov(r7, Operand(Smi::FromInt(marker)));
__ mov(r6, Operand(Smi::FromInt(marker)));
__ mov(r5,
- Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate)));
+ Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
__ ldr(r5, MemOperand(r5));
__ mov(ip, Operand(-1)); // Push a bad frame pointer to fail if it is used.
__ stm(db_w, sp, r5.bit() | r6.bit() | r7.bit() |
// If this is the outermost JS call, set js_entry_sp value.
Label non_outermost_js;
- ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate);
+ ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate());
__ mov(r5, Operand(ExternalReference(js_entry_sp)));
__ ldr(r6, MemOperand(r5));
__ cmp(r6, Operand::Zero());
// fp will be invalid because the PushTryHandler below sets it to 0 to
// signal the existence of the JSEntry frame.
__ mov(ip, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
- isolate)));
+ isolate())));
}
__ str(r0, MemOperand(ip));
__ LoadRoot(r0, Heap::kExceptionRootIndex);
// saved values before returning a failure to C.
// Clear any pending exceptions.
- __ mov(r5, Operand(isolate->factory()->the_hole_value()));
+ __ mov(r5, Operand(isolate()->factory()->the_hole_value()));
__ mov(ip, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
- isolate)));
+ isolate())));
__ str(r5, MemOperand(ip));
// Invoke the function by calling through JS entry trampoline builtin.
// r4: argv
if (is_construct) {
ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
- isolate);
+ isolate());
__ mov(ip, Operand(construct_entry));
} else {
- ExternalReference entry(Builtins::kJSEntryTrampoline, isolate);
+ ExternalReference entry(Builtins::kJSEntryTrampoline, isolate());
__ mov(ip, Operand(entry));
}
__ ldr(ip, MemOperand(ip)); // deref address
// Restore the top frame descriptors from the stack.
__ pop(r3);
__ mov(ip,
- Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate)));
+ Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
__ str(r3, MemOperand(ip));
// Reset the stack to the callee saved registers.
__ b(ne, &slow);
// Null is not instance of anything.
- __ cmp(scratch, Operand(masm->isolate()->factory()->null_value()));
+ __ cmp(scratch, Operand(isolate()->factory()->null_value()));
__ b(ne, &object_not_null);
__ mov(r0, Operand(Smi::FromInt(1)));
__ Ret(HasArgsInRegisters() ? 0 : 2);
// -- r0 : key
// -- r1 : receiver
// -----------------------------------
- __ cmp(r0, Operand(masm->isolate()->factory()->prototype_string()));
+ __ cmp(r0, Operand(isolate()->factory()->prototype_string()));
__ b(ne, &miss);
receiver = r1;
} else {
Register last_match_info_elements = no_reg; // will be r6.
// Ensure that a RegExp stack is allocated.
- Isolate* isolate = masm->isolate();
ExternalReference address_of_regexp_stack_memory_address =
- ExternalReference::address_of_regexp_stack_memory_address(isolate);
+ ExternalReference::address_of_regexp_stack_memory_address(isolate());
ExternalReference address_of_regexp_stack_memory_size =
- ExternalReference::address_of_regexp_stack_memory_size(isolate);
+ ExternalReference::address_of_regexp_stack_memory_size(isolate());
__ mov(r0, Operand(address_of_regexp_stack_memory_size));
__ ldr(r0, MemOperand(r0, 0));
__ cmp(r0, Operand::Zero());
// subject: Subject string
// regexp_data: RegExp data (FixedArray)
// All checks done. Now push arguments for native regexp code.
- __ IncrementCounter(isolate->counters()->regexp_entry_native(), 1, r0, r2);
+ __ IncrementCounter(isolate()->counters()->regexp_entry_native(), 1, r0, r2);
// Isolates: note we add an additional parameter here (isolate pointer).
const int kRegExpExecuteArguments = 9;
// Arguments are before that on the stack or in registers.
// Argument 9 (sp[20]): Pass current isolate address.
- __ mov(r0, Operand(ExternalReference::isolate_address(isolate)));
+ __ mov(r0, Operand(ExternalReference::isolate_address(isolate())));
__ str(r0, MemOperand(sp, 5 * kPointerSize));
// Argument 8 (sp[16]): Indicate that this is a direct call from JavaScript.
// Argument 5 (sp[4]): static offsets vector buffer.
__ mov(r0,
- Operand(ExternalReference::address_of_static_offsets_vector(isolate)));
+ Operand(ExternalReference::address_of_static_offsets_vector(
+ isolate())));
__ str(r0, MemOperand(sp, 1 * kPointerSize));
// For arguments 4 and 3 get string length, calculate start of string data and
// Locate the code entry and call it.
__ add(r6, r6, Operand(Code::kHeaderSize - kHeapObjectTag));
- DirectCEntryStub stub;
+ DirectCEntryStub stub(isolate());
stub.GenerateCall(masm, r6);
__ LeaveExitFrame(false, no_reg, true);
// stack overflow (on the backtrack stack) was detected in RegExp code but
// haven't created the exception yet. Handle that in the runtime system.
// TODO(592): Rerunning the RegExp to get the stack overflow exception.
- __ mov(r1, Operand(isolate->factory()->the_hole_value()));
+ __ mov(r1, Operand(isolate()->factory()->the_hole_value()));
__ mov(r2, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
- isolate)));
+ isolate())));
__ ldr(r0, MemOperand(r2, 0));
__ cmp(r0, r1);
__ b(eq, &runtime);
__ bind(&failure);
// For failure and exception return null.
- __ mov(r0, Operand(masm->isolate()->factory()->null_value()));
+ __ mov(r0, Operand(isolate()->factory()->null_value()));
__ add(sp, sp, Operand(4 * kPointerSize));
__ Ret();
// Get the static offsets vector filled by the native regexp code.
ExternalReference address_of_static_offsets_vector =
- ExternalReference::address_of_static_offsets_vector(isolate);
+ ExternalReference::address_of_static_offsets_vector(isolate());
__ mov(r2, Operand(address_of_static_offsets_vector));
// r1: number of capture registers
__ SmiTag(r0);
__ Push(r3, r2, r1, r0);
- CreateAllocationSiteStub create_stub;
+ CreateAllocationSiteStub create_stub(masm->isolate());
__ CallStub(&create_stub);
__ Pop(r3, r2, r1, r0);
// If there is a call target cache, mark it megamorphic in the
// non-function case. MegamorphicSentinel is an immortal immovable
// object (megamorphic symbol) so no write barrier is needed.
- ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()),
- masm->isolate()->heap()->megamorphic_symbol());
+ ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(isolate()),
+ isolate()->heap()->megamorphic_symbol());
__ add(r5, r2, Operand::PointerOffsetFromSmiKey(r3));
__ LoadRoot(ip, Heap::kMegamorphicSymbolRootIndex);
__ str(ip, FieldMemOperand(r5, FixedArray::kHeaderSize));
__ GetBuiltinFunction(r1, Builtins::CALL_FUNCTION_PROXY);
{
Handle<Code> adaptor =
- masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
+ isolate()->builtins()->ArgumentsAdaptorTrampoline();
__ Jump(adaptor, RelocInfo::CODE_TARGET);
}
__ mov(r0, Operand(argc_)); // Set up the number of arguments.
__ mov(r2, Operand::Zero());
__ GetBuiltinFunction(r1, Builtins::CALL_NON_FUNCTION);
- __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
+ __ Jump(isolate()->builtins()->ArgumentsAdaptorTrampoline(),
RelocInfo::CODE_TARGET);
}
__ bind(&do_call);
// Set expected number of arguments to zero (not changing r0).
__ mov(r2, Operand::Zero());
- __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
+ __ Jump(isolate()->builtins()->ArgumentsAdaptorTrampoline(),
RelocInfo::CODE_TARGET);
}
masm, r1, r5, r2, r3, r4, r6, r9, DEST_ALWAYS_ALIGNED);
__ bind(&return_r0);
- Counters* counters = masm->isolate()->counters();
+ Counters* counters = isolate()->counters();
__ IncrementCounter(counters->sub_string_native(), 1, r3, r4);
__ Drop(3);
__ Ret();
void StringCompareStub::Generate(MacroAssembler* masm) {
Label runtime;
- Counters* counters = masm->isolate()->counters();
+ Counters* counters = isolate()->counters();
// Stack frame on entry.
// sp[0]: right string
// -- r0 : right
// -- lr : return address
// -----------------------------------
- Isolate* isolate = masm->isolate();
// Load r2 with the allocation site. We stick an undefined dummy value here
// and replace it with the real allocation site later when we instantiate this
// stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
- __ Move(r2, handle(isolate->heap()->undefined_value()));
+ __ Move(r2, handle(isolate()->heap()->undefined_value()));
// Make sure that we actually patched the allocation site.
if (FLAG_debug_code) {
// Tail call into the stub that handles binary operations with allocation
// sites.
- BinaryOpWithAllocationSiteStub stub(state_);
+ BinaryOpWithAllocationSiteStub stub(isolate(), state_);
__ TailCallStub(&stub);
}
__ bind(&unordered);
__ bind(&generic_stub);
- ICCompareStub stub(op_, CompareIC::GENERIC, CompareIC::GENERIC,
+ ICCompareStub stub(isolate(), op_, CompareIC::GENERIC, CompareIC::GENERIC,
CompareIC::GENERIC);
- __ Jump(stub.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
+ __ Jump(stub.GetCode(isolate()), RelocInfo::CODE_TARGET);
__ bind(&maybe_undefined1);
if (Token::IsOrderedRelationalCompareOp(op_)) {
{
// Call the runtime system in a fresh internal frame.
ExternalReference miss =
- ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate());
+ ExternalReference(IC_Utility(IC::kCompareIC_Miss), isolate());
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
__ Push(r1, r0);
void DirectCEntryStub::GenerateCall(MacroAssembler* masm,
Register target) {
intptr_t code =
- reinterpret_cast<intptr_t>(GetCode(masm->isolate()).location());
+ reinterpret_cast<intptr_t>(GetCode(isolate()).location());
__ Move(ip, target);
__ mov(lr, Operand(code, RelocInfo::CODE_TARGET));
__ blx(lr); // Call the stub.
__ stm(db_w, sp, spill_mask);
__ ldr(r0, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
__ mov(r1, Operand(Handle<Name>(name)));
- NameDictionaryLookupStub stub(NEGATIVE_LOOKUP);
+ NameDictionaryLookupStub stub(masm->isolate(), NEGATIVE_LOOKUP);
__ CallStub(&stub);
__ cmp(r0, Operand::Zero());
__ ldm(ia_w, sp, spill_mask);
__ Move(r0, elements);
__ Move(r1, name);
}
- NameDictionaryLookupStub stub(POSITIVE_LOOKUP);
+ NameDictionaryLookupStub stub(masm->isolate(), POSITIVE_LOOKUP);
__ CallStub(&stub);
__ cmp(r0, Operand::Zero());
__ mov(scratch2, Operand(r2));
void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
Isolate* isolate) {
- StoreBufferOverflowStub stub1(kDontSaveFPRegs);
+ StoreBufferOverflowStub stub1(isolate, kDontSaveFPRegs);
stub1.GetCode(isolate);
// Hydrogen code stubs need stub2 at snapshot time.
- StoreBufferOverflowStub stub2(kSaveFPRegs);
+ StoreBufferOverflowStub stub2(isolate, kSaveFPRegs);
stub2.GetCode(isolate);
}
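// Ahead-of-time helpers like the one above are static, so no stub instance
// (and hence no isolate() accessor) is in scope; they take the Isolate* as
// an argument and hand it to each constructor and GetCode call.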
__ Move(address, regs_.address());
__ Move(r0, regs_.object());
__ Move(r1, address);
- __ mov(r2, Operand(ExternalReference::isolate_address(masm->isolate())));
+ __ mov(r2, Operand(ExternalReference::isolate_address(isolate())));
AllowExternalCallThatCantCauseGC scope(masm);
__ CallCFunction(
- ExternalReference::incremental_marking_record_write_function(
- masm->isolate()),
+ ExternalReference::incremental_marking_record_write_function(isolate()),
argument_count);
regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_);
}
void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
- CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
- __ Call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
+ CEntryStub ces(isolate(), 1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
+ __ Call(ces.GetCode(isolate()), RelocInfo::CODE_TARGET);
int parameter_count_offset =
StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
__ ldr(r1, MemOperand(fp, parameter_count_offset));
void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
if (masm->isolate()->function_entry_hook() != NULL) {
- ProfileEntryHookStub stub;
+ ProfileEntryHookStub stub(masm->isolate());
int code_size = masm->CallStubSize(&stub) + 2 * Assembler::kInstrSize;
PredictableCodeSizeScope predictable(masm, code_size);
__ push(lr);
#if V8_HOST_ARCH_ARM
int32_t entry_hook =
- reinterpret_cast<int32_t>(masm->isolate()->function_entry_hook());
+ reinterpret_cast<int32_t>(isolate()->function_entry_hook());
__ mov(ip, Operand(entry_hook));
#else
// Under the simulator we need to indirect the entry hook through a
// trampoline function at a known address.
// It additionally takes an isolate as a third parameter
- __ mov(r2, Operand(ExternalReference::isolate_address(masm->isolate())));
+ __ mov(r2, Operand(ExternalReference::isolate_address(isolate())));
ApiFunction dispatcher(FUNCTION_ADDR(EntryHookTrampoline));
__ mov(ip, Operand(ExternalReference(&dispatcher,
ExternalReference::BUILTIN_CALL,
- masm->isolate())));
+ isolate())));
#endif
__ Call(ip);
static void CreateArrayDispatch(MacroAssembler* masm,
AllocationSiteOverrideMode mode) {
if (mode == DISABLE_ALLOCATION_SITES) {
- T stub(GetInitialFastElementsKind(), mode);
+ T stub(masm->isolate(), GetInitialFastElementsKind(), mode);
__ TailCallStub(&stub);
} else if (mode == DONT_OVERRIDE) {
int last_index = GetSequenceIndexFromFastElementsKind(
for (int i = 0; i <= last_index; ++i) {
ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
__ cmp(r3, Operand(kind));
- T stub(kind);
+ T stub(masm->isolate(), kind);
__ TailCallStub(&stub, eq);
}
ElementsKind initial = GetInitialFastElementsKind();
ElementsKind holey_initial = GetHoleyElementsKind(initial);
- ArraySingleArgumentConstructorStub stub_holey(holey_initial,
+ ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
+ holey_initial,
DISABLE_ALLOCATION_SITES);
__ TailCallStub(&stub_holey);
__ bind(&normal_sequence);
- ArraySingleArgumentConstructorStub stub(initial,
+ ArraySingleArgumentConstructorStub stub(masm->isolate(),
+ initial,
DISABLE_ALLOCATION_SITES);
__ TailCallStub(&stub);
} else if (mode == DONT_OVERRIDE) {
for (int i = 0; i <= last_index; ++i) {
ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
__ cmp(r3, Operand(kind));
- ArraySingleArgumentConstructorStub stub(kind);
+ ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
__ TailCallStub(&stub, eq);
}
TERMINAL_FAST_ELEMENTS_KIND);
for (int i = 0; i <= to_index; ++i) {
ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
- T stub(kind);
+ T stub(isolate, kind);
stub.GetCode(isolate);
if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) {
- T stub1(kind, DISABLE_ALLOCATION_SITES);
+ T stub1(isolate, kind, DISABLE_ALLOCATION_SITES);
stub1.GetCode(isolate);
}
}
ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
for (int i = 0; i < 2; i++) {
// For internal arrays we only need a few things
- InternalArrayNoArgumentConstructorStub stubh1(kinds[i]);
+ InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]);
stubh1.GetCode(isolate);
- InternalArraySingleArgumentConstructorStub stubh2(kinds[i]);
+ InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
stubh2.GetCode(isolate);
- InternalArrayNArgumentsConstructorStub stubh3(kinds[i]);
+ InternalArrayNArgumentsConstructorStub stubh3(isolate, kinds[i]);
stubh3.GetCode(isolate);
}
}
MacroAssembler* masm, ElementsKind kind) {
__ cmp(r0, Operand(1));
- InternalArrayNoArgumentConstructorStub stub0(kind);
+ InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
__ TailCallStub(&stub0, lo);
- InternalArrayNArgumentsConstructorStub stubN(kind);
+ InternalArrayNArgumentsConstructorStub stubN(isolate(), kind);
__ TailCallStub(&stubN, hi);
if (IsFastPackedElementsKind(kind)) {
__ cmp(r3, Operand::Zero());
InternalArraySingleArgumentConstructorStub
- stub1_holey(GetHoleyElementsKind(kind));
+ stub1_holey(isolate(), GetHoleyElementsKind(kind));
__ TailCallStub(&stub1_holey, ne);
}
- InternalArraySingleArgumentConstructorStub stub1(kind);
+ InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
__ TailCallStub(&stub1);
}
STATIC_ASSERT(FCA::kHolderIndex == 0);
STATIC_ASSERT(FCA::kArgsLength == 7);
- Isolate* isolate = masm->isolate();
-
// context save
__ push(context);
// load context from callee
__ push(scratch);
// isolate
__ mov(scratch,
- Operand(ExternalReference::isolate_address(isolate)));
+ Operand(ExternalReference::isolate_address(isolate())));
__ push(scratch);
// holder
__ push(holder);
ExternalReference::Type thunk_type = ExternalReference::PROFILING_API_CALL;
ApiFunction thunk_fun(thunk_address);
ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type,
- masm->isolate());
+ isolate());
AllowExternalCallThatCantCauseGC scope(masm);
MemOperand context_restore_operand(
ExternalReference::PROFILING_GETTER_CALL;
ApiFunction thunk_fun(thunk_address);
ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type,
- masm->isolate());
+ isolate());
__ CallApiFunctionAndReturn(api_function_address,
thunk_ref,
kStackUnwindSpace,
class StoreBufferOverflowStub: public PlatformCodeStub {
public:
- explicit StoreBufferOverflowStub(SaveFPRegsMode save_fp)
- : save_doubles_(save_fp) {}
+ StoreBufferOverflowStub(Isolate* isolate, SaveFPRegsMode save_fp)
+ : PlatformCodeStub(isolate), save_doubles_(save_fp) {}
void Generate(MacroAssembler* masm);
class SubStringStub: public PlatformCodeStub {
public:
- SubStringStub() {}
+ explicit SubStringStub(Isolate* isolate) : PlatformCodeStub(isolate) {}
private:
Major MajorKey() { return SubString; }
class StringCompareStub: public PlatformCodeStub {
public:
- StringCompareStub() { }
+ explicit StringCompareStub(Isolate* isolate) : PlatformCodeStub(isolate) { }
// Compares two flat ASCII strings and returns result in r0.
static void GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
// so you don't have to set up the frame.
class WriteInt32ToHeapNumberStub : public PlatformCodeStub {
public:
- WriteInt32ToHeapNumberStub(Register the_int,
+ WriteInt32ToHeapNumberStub(Isolate* isolate,
+ Register the_int,
Register the_heap_number,
Register scratch)
- : the_int_(the_int),
+ : PlatformCodeStub(isolate),
+ the_int_(the_int),
the_heap_number_(the_heap_number),
scratch_(scratch) { }
class RecordWriteStub: public PlatformCodeStub {
public:
- RecordWriteStub(Register object,
+ RecordWriteStub(Isolate* isolate,
+ Register object,
Register value,
Register address,
RememberedSetAction remembered_set_action,
SaveFPRegsMode fp_mode)
- : object_(object),
+ : PlatformCodeStub(isolate),
+ object_(object),
value_(value),
address_(address),
remembered_set_action_(remembered_set_action),
// moved by GC
class DirectCEntryStub: public PlatformCodeStub {
public:
- DirectCEntryStub() {}
+ explicit DirectCEntryStub(Isolate* isolate) : PlatformCodeStub(isolate) {}
void Generate(MacroAssembler* masm);
void GenerateCall(MacroAssembler* masm, Register target);
public:
enum LookupMode { POSITIVE_LOOKUP, NEGATIVE_LOOKUP };
- explicit NameDictionaryLookupStub(LookupMode mode) : mode_(mode) { }
+ NameDictionaryLookupStub(Isolate* isolate, LookupMode mode)
+ : PlatformCodeStub(isolate), mode_(mode) { }
void Generate(MacroAssembler* masm);
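// Header-side convention, inferred from the classes above: a constructor
// left with only the Isolate* parameter is marked explicit, a constructor
// taking further parameters drops explicit, and all of them forward the
// isolate to PlatformCodeStub's constructor.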
__ mov(r0, Operand::Zero()); // no arguments
__ mov(r1, Operand(ExternalReference::debug_break(masm->isolate())));
- CEntryStub ceb(1);
+ CEntryStub ceb(masm->isolate(), 1);
__ CallStub(&ceb);
// Restore the register values from the expression stack.
__ Push(info->scope()->GetScopeInfo());
__ CallRuntime(Runtime::kHiddenNewGlobalContext, 2);
} else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
- FastNewContextStub stub(heap_slots);
+ FastNewContextStub stub(isolate(), heap_slots);
__ CallStub(&stub);
} else {
__ push(r1);
} else {
type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
}
- ArgumentsAccessStub stub(type);
+ ArgumentsAccessStub stub(isolate(), type);
__ CallStub(&stub);
SetVar(arguments, r0, r1, r2);
!pretenure &&
scope()->is_function_scope() &&
info->num_literals() == 0) {
- FastNewClosureStub stub(info->strict_mode(), info->is_generator());
+ FastNewClosureStub stub(isolate(),
+ info->strict_mode(),
+ info->is_generator());
__ mov(r2, Operand(info));
__ CallStub(&stub);
} else {
__ Push(r3, r2, r1, r0);
__ CallRuntime(Runtime::kHiddenCreateObjectLiteral, 4);
} else {
- FastCloneShallowObjectStub stub(properties_count);
+ FastCloneShallowObjectStub stub(isolate(), properties_count);
__ CallStub(&stub);
}
if (has_fast_elements && constant_elements_values->map() ==
isolate()->heap()->fixed_cow_array_map()) {
FastCloneShallowArrayStub stub(
+ isolate(),
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
allocation_site_mode,
length);
mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
}
- FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
+ FastCloneShallowArrayStub stub(isolate(), mode, allocation_site_mode,
+ length);
__ CallStub(&stub);
}
EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
} else {
__ mov(r3, Operand(Smi::FromInt(i)));
- StoreArrayLiteralElementStub stub;
+ StoreArrayLiteralElementStub stub(isolate());
__ CallStub(&stub);
}
CallIC(ic, TypeFeedbackId::None());
__ mov(r1, r0);
__ str(r1, MemOperand(sp, 2 * kPointerSize));
- CallFunctionStub stub(1, CALL_AS_METHOD);
+ CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
__ CallStub(&stub);
__ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
patch_site.EmitJumpIfSmi(scratch1, &smi_case);
__ bind(&stub_call);
- BinaryOpICStub stub(op, mode);
+ BinaryOpICStub stub(isolate(), op, mode);
CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
patch_site.EmitPatchInfo();
__ jmp(&done);
Token::Value op,
OverwriteMode mode) {
__ pop(r1);
- BinaryOpICStub stub(op, mode);
+ BinaryOpICStub stub(isolate(), op, mode);
JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
patch_site.EmitPatchInfo();
// Record source position for debugger.
SetSourcePosition(expr->position());
- CallFunctionStub stub(arg_count, flags);
+ CallFunctionStub stub(isolate(), arg_count, flags);
__ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
__ CallStub(&stub);
// Record source position for debugger.
SetSourcePosition(expr->position());
- CallFunctionStub stub(arg_count, CALL_AS_METHOD);
+ CallFunctionStub stub(isolate(), arg_count, CALL_AS_METHOD);
__ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
__ CallStub(&stub);
__ mov(r3, Operand(Smi::FromInt(expr->CallFeedbackSlot())));
// Record call targets in unoptimized code.
- CallFunctionStub stub(arg_count, RECORD_CALL_TARGET);
+ CallFunctionStub stub(isolate(), arg_count, RECORD_CALL_TARGET);
__ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
__ CallStub(&stub);
RecordJSReturnSite(expr);
// Record source position for debugger.
SetSourcePosition(expr->position());
- CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS);
+ CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
__ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
__ CallStub(&stub);
RecordJSReturnSite(expr);
__ Move(r2, FeedbackVector());
__ mov(r3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot())));
- CallConstructStub stub(RECORD_CALL_TARGET);
+ CallConstructStub stub(isolate(), RECORD_CALL_TARGET);
__ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL);
PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
context()->Plug(r0);
VisitForAccumulatorValue(args->at(0));
__ mov(r1, r0);
__ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
- ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
+ ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
__ CallStub(&stub);
context()->Plug(r0);
}
void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
// Load the arguments on the stack and call the stub.
- SubStringStub stub;
+ SubStringStub stub(isolate());
ZoneList<Expression*>* args = expr->arguments();
ASSERT(args->length() == 3);
VisitForStackValue(args->at(0));
void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
// Load the arguments on the stack and call the stub.
- RegExpExecStub stub;
+ RegExpExecStub stub(isolate());
ZoneList<Expression*>* args = expr->arguments();
ASSERT(args->length() == 4);
VisitForStackValue(args->at(0));
ASSERT(args->length() == 2);
VisitForStackValue(args->at(0));
VisitForStackValue(args->at(1));
- MathPowStub stub(MathPowStub::ON_STACK);
+ MathPowStub stub(isolate(), MathPowStub::ON_STACK);
__ CallStub(&stub);
context()->Plug(r0);
}
// Load the argument into r0 and call the stub.
VisitForAccumulatorValue(args->at(0));
- NumberToStringStub stub;
+ NumberToStringStub stub(isolate());
__ CallStub(&stub);
context()->Plug(r0);
}
VisitForAccumulatorValue(args->at(1));
__ pop(r1);
- StringAddStub stub(STRING_ADD_CHECK_BOTH, NOT_TENURED);
+ StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
__ CallStub(&stub);
context()->Plug(r0);
}
VisitForStackValue(args->at(0));
VisitForStackValue(args->at(1));
- StringCompareStub stub;
+ StringCompareStub stub(isolate());
__ CallStub(&stub);
context()->Plug(r0);
}
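// In these full-codegen hunks, isolate() is the code generator's own
// accessor rather than a stub member; the stubs constructed here are plain
// locals that receive that isolate.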
void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
- RegExpConstructResultStub stub;
+ RegExpConstructResultStub stub(isolate());
ZoneList<Expression*>* args = expr->arguments();
ASSERT(args->length() == 3);
VisitForStackValue(args->at(0));
// Record source position of the IC call.
SetSourcePosition(expr->position());
- CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS);
+ CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
__ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
__ CallStub(&stub);
__ jmp(&stub_call);
__ bind(&slow);
}
- ToNumberStub convert_stub;
+ ToNumberStub convert_stub(isolate());
__ CallStub(&convert_stub);
// Save result for postfix expressions.
// Record position before stub call.
SetSourcePosition(expr->position());
- BinaryOpICStub stub(Token::ADD, NO_OVERWRITE);
+ BinaryOpICStub stub(isolate(), Token::ADD, NO_OVERWRITE);
CallIC(stub.GetCode(isolate()), expr->CountBinOpFeedbackId());
patch_site.EmitPatchInfo();
__ bind(&done);
case Token::INSTANCEOF: {
VisitForStackValue(expr->right());
- InstanceofStub stub(InstanceofStub::kNoFlags);
+ InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
__ CallStub(&stub);
PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
// The stub returns 0 for true.
Comment(";;; Allocate local context");
// Argument to NewContext is the function, which is in r1.
if (heap_slots <= FastNewContextStub::kMaximumSlots) {
- FastNewContextStub stub(heap_slots);
+ FastNewContextStub stub(isolate(), heap_slots);
__ CallStub(&stub);
} else {
__ push(r1);
ASSERT(ToRegister(instr->result()).is(r0));
switch (instr->hydrogen()->major_key()) {
case CodeStub::RegExpExec: {
- RegExpExecStub stub;
+ RegExpExecStub stub(isolate());
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
break;
}
case CodeStub::SubString: {
- SubStringStub stub;
+ SubStringStub stub(isolate());
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
break;
}
case CodeStub::StringCompare: {
- StringCompareStub stub;
+ StringCompareStub stub(isolate());
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
break;
}
ASSERT(ToRegister(instr->right()).is(r0));
ASSERT(ToRegister(instr->result()).is(r0));
- BinaryOpICStub stub(instr->op(), NO_OVERWRITE);
+ BinaryOpICStub stub(isolate(), instr->op(), NO_OVERWRITE);
// Block literal pool emission to ensure that the nop indicating no inlined
// smi code is in the correct position.
Assembler::BlockConstPoolScope block_const_pool(masm());
ASSERT(ToRegister(instr->left()).is(r0)); // Object is in r0.
ASSERT(ToRegister(instr->right()).is(r1)); // Function is in r1.
- InstanceofStub stub(InstanceofStub::kArgsInRegisters);
+ InstanceofStub stub(isolate(), InstanceofStub::kArgsInRegisters);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
__ cmp(r0, Operand::Zero());
flags | InstanceofStub::kCallSiteInlineCheck);
flags = static_cast<InstanceofStub::Flags>(
flags | InstanceofStub::kReturnTrueFalseObject);
- InstanceofStub stub(flags);
+ InstanceofStub stub(isolate(), flags);
PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
LoadContextFromDeferred(instr->context());
ASSERT(ToDoubleRegister(instr->result()).is(d2));
if (exponent_type.IsSmi()) {
- MathPowStub stub(MathPowStub::TAGGED);
+ MathPowStub stub(isolate(), MathPowStub::TAGGED);
__ CallStub(&stub);
} else if (exponent_type.IsTagged()) {
Label no_deopt;
__ cmp(r6, Operand(ip));
DeoptimizeIf(ne, instr->environment());
__ bind(&no_deopt);
- MathPowStub stub(MathPowStub::TAGGED);
+ MathPowStub stub(isolate(), MathPowStub::TAGGED);
__ CallStub(&stub);
} else if (exponent_type.IsInteger32()) {
- MathPowStub stub(MathPowStub::INTEGER);
+ MathPowStub stub(isolate(), MathPowStub::INTEGER);
__ CallStub(&stub);
} else {
ASSERT(exponent_type.IsDouble());
- MathPowStub stub(MathPowStub::DOUBLE);
+ MathPowStub stub(isolate(), MathPowStub::DOUBLE);
__ CallStub(&stub);
}
}
ASSERT(ToRegister(instr->result()).is(r0));
int arity = instr->arity();
- CallFunctionStub stub(arity, instr->hydrogen()->function_flags());
+ CallFunctionStub stub(isolate(), arity, instr->hydrogen()->function_flags());
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
}
__ mov(r0, Operand(instr->arity()));
// No cell in r2 for construct type feedback in optimized code
__ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
- CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
+ CallConstructStub stub(isolate(), NO_CALL_FUNCTION_FLAGS);
CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
}
: DONT_OVERRIDE;
if (instr->arity() == 0) {
- ArrayNoArgumentConstructorStub stub(kind, override_mode);
+ ArrayNoArgumentConstructorStub stub(isolate(), kind, override_mode);
CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
} else if (instr->arity() == 1) {
Label done;
__ b(eq, &packed_case);
ElementsKind holey_kind = GetHoleyElementsKind(kind);
- ArraySingleArgumentConstructorStub stub(holey_kind, override_mode);
+ ArraySingleArgumentConstructorStub stub(isolate(),
+ holey_kind,
+ override_mode);
CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
__ jmp(&done);
__ bind(&packed_case);
}
- ArraySingleArgumentConstructorStub stub(kind, override_mode);
+ ArraySingleArgumentConstructorStub stub(isolate(), kind, override_mode);
CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
__ bind(&done);
} else {
- ArrayNArgumentsConstructorStub stub(kind, override_mode);
+ ArrayNArgumentsConstructorStub stub(isolate(), kind, override_mode);
CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
}
}
this, Safepoint::kWithRegistersAndDoubles);
__ Move(r1, to_map);
bool is_js_array = from_map->instance_type() == JS_ARRAY_TYPE;
- TransitionElementsKindStub stub(from_kind, to_kind, is_js_array);
+ TransitionElementsKindStub stub(isolate(), from_kind, to_kind, is_js_array);
__ CallStub(&stub);
RecordSafepointWithRegistersAndDoubles(
instr->pointer_map(), 0, Safepoint::kLazyDeopt);
ASSERT(ToRegister(instr->context()).is(cp));
ASSERT(ToRegister(instr->left()).is(r1));
ASSERT(ToRegister(instr->right()).is(r0));
- StringAddStub stub(instr->hydrogen()->flags(),
+ StringAddStub stub(isolate(),
+ instr->hydrogen()->flags(),
instr->hydrogen()->pretenure_flag());
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
}
// space for nested functions that don't need literals cloning.
bool pretenure = instr->hydrogen()->pretenure();
if (!pretenure && instr->hydrogen()->has_no_literals()) {
- FastNewClosureStub stub(instr->hydrogen()->strict_mode(),
+ FastNewClosureStub stub(isolate(),
+ instr->hydrogen()->strict_mode(),
instr->hydrogen()->is_generator());
__ mov(r2, Operand(instr->hydrogen()->shared_info()));
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
if (lr_status == kLRHasNotBeenSaved) {
push(lr);
}
- RecordWriteStub stub(object, value, address, remembered_set_action, fp_mode);
+ RecordWriteStub stub(isolate(), object, value, address, remembered_set_action,
+ fp_mode);
CallStub(&stub);
if (lr_status == kLRHasNotBeenSaved) {
pop(lr);
}
push(lr);
StoreBufferOverflowStub store_buffer_overflow =
- StoreBufferOverflowStub(fp_mode);
+ StoreBufferOverflowStub(isolate(), fp_mode);
CallStub(&store_buffer_overflow);
pop(lr);
bind(&done);
void MacroAssembler::DebugBreak() {
mov(r0, Operand::Zero());
mov(r1, Operand(ExternalReference(Runtime::kDebugBreak, isolate())));
- CEntryStub ces(1);
+ CEntryStub ces(isolate(), 1);
ASSERT(AllowThisStubCall(&ces));
Call(ces.GetCode(isolate()), RelocInfo::DEBUG_BREAK);
}
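// MacroAssembler has an isolate() accessor of its own, which is why member
// code like DebugBreak() above can construct stubs with isolate() directly,
// without a stub instance or a masm-> prefix in scope.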
// Native call returns to the DirectCEntry stub which redirects to the
// return address pushed on stack (could have moved after GC).
// DirectCEntry stub itself is generated early and never moves.
- DirectCEntryStub stub;
+ DirectCEntryStub stub(isolate());
stub.GenerateCall(this, r3);
if (FLAG_log_timer_events) {
sub(sp, sp, Operand(kDoubleSize)); // Put input on stack.
vstr(double_input, MemOperand(sp, 0));
- DoubleToIStub stub(sp, result, 0, true, true);
+ DoubleToIStub stub(isolate(), sp, result, 0, true, true);
CallStub(&stub);
add(sp, sp, Operand(kDoubleSize));
// If we fell through then the inline version didn't succeed, so call the
// stub instead.
push(lr);
- DoubleToIStub stub(object,
+ DoubleToIStub stub(isolate(),
+ object,
result,
HeapNumber::kValueOffset - kHeapObjectTag,
true,
// smarter.
mov(r0, Operand(num_arguments));
mov(r1, Operand(ExternalReference(f, isolate())));
- CEntryStub stub(1, save_doubles);
+ CEntryStub stub(isolate(), 1, save_doubles);
CallStub(&stub);
}
mov(r0, Operand(num_arguments));
mov(r1, Operand(ext));
- CEntryStub stub(1);
+ CEntryStub stub(isolate(), 1);
CallStub(&stub);
}
ASSERT((reinterpret_cast<intptr_t>(builtin.address()) & 1) == 1);
#endif
mov(r1, Operand(builtin));
- CEntryStub stub(1);
+ CEntryStub stub(isolate(), 1);
Jump(stub.GetCode(isolate()), RelocInfo::CODE_TARGET);
}
ExternalReference stack_guard_check =
ExternalReference::re_check_stack_guard_state(isolate());
__ mov(ip, Operand(stack_guard_check));
- DirectCEntryStub stub;
+ DirectCEntryStub stub(isolate());
stub.GenerateCall(masm_, ip);
// Drop the return address from the stack.
__ mov(api_function_address, Operand(ref));
// Jump to stub.
- CallApiFunctionStub stub(is_store, call_data_undefined, argc);
+ CallApiFunctionStub stub(isolate, is_store, call_data_undefined, argc);
__ TailCallStub(&stub);
}
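// The bare `isolate` above (no parentheses) is a local Isolate* inside a
// static helper; the member functions below use the compiler's isolate()
// accessor instead.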
Representation representation) {
if (!reg.is(receiver())) __ mov(receiver(), reg);
if (kind() == Code::LOAD_IC) {
- LoadFieldStub stub(field.is_inobject(holder),
+ LoadFieldStub stub(isolate(),
+ field.is_inobject(holder),
field.translate(holder),
representation);
GenerateTailCall(masm(), stub.GetCode(isolate()));
} else {
- KeyedLoadFieldStub stub(field.is_inobject(holder),
+ KeyedLoadFieldStub stub(isolate(),
+ field.is_inobject(holder),
field.translate(holder),
representation);
GenerateTailCall(masm(), stub.GetCode(isolate()));
ExternalReference ref = ExternalReference(&fun, type, isolate());
__ mov(getter_address_reg, Operand(ref));
- CallApiGetterStub stub;
+ CallApiGetterStub stub(isolate());
__ TailCallStub(&stub);
}
// No type feedback cell is available.
__ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
- CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
+ CallConstructStub stub(masm->isolate(), NO_CALL_FUNCTION_FLAGS);
__ CallStub(&stub);
} else {
ParameterCount actual(x0);
void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) {
// Update the static counter each time a new code stub is generated.
- Isolate* isolate = masm->isolate();
- isolate->counters()->code_stubs()->Increment();
+ isolate()->counters()->code_stubs()->Increment();
- CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate);
+ CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate());
int param_count = descriptor->register_param_count_;
{
// Call the runtime system in a fresh internal frame.
__ JumpIfBothInstanceTypesAreNotSequentialAscii(lhs_type, rhs_type, x14,
x15, &slow);
- Isolate* isolate = masm->isolate();
- __ IncrementCounter(isolate->counters()->string_compare_native(), 1, x10,
+ __ IncrementCounter(isolate()->counters()->string_compare_native(), 1, x10,
x11);
if (cond == eq) {
StringCompareStub::GenerateFlatAsciiStringEquals(masm, lhs, rhs,
}
AllowExternalCallThatCantCauseGC scope(masm);
- __ Mov(x0, ExternalReference::isolate_address(masm->isolate()));
+ __ Mov(x0, ExternalReference::isolate_address(isolate()));
__ CallCFunction(
- ExternalReference::store_buffer_overflow_function(masm->isolate()),
- 1, 0);
+ ExternalReference::store_buffer_overflow_function(isolate()), 1, 0);
if (save_doubles_ == kSaveFPRegs) {
__ PopCPURegList(saved_fp_regs);
void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
Isolate* isolate) {
- StoreBufferOverflowStub stub1(kDontSaveFPRegs);
+ StoreBufferOverflowStub stub1(isolate, kDontSaveFPRegs);
stub1.GetCode(isolate);
- StoreBufferOverflowStub stub2(kSaveFPRegs);
+ StoreBufferOverflowStub stub2(isolate, kSaveFPRegs);
stub2.GetCode(isolate);
}
AllowExternalCallThatCantCauseGC scope(masm);
__ Mov(saved_lr, lr);
__ CallCFunction(
- ExternalReference::power_double_double_function(masm->isolate()),
+ ExternalReference::power_double_double_function(isolate()),
0, 2);
__ Mov(lr, saved_lr);
__ B(&done);
result_double);
ASSERT(result_tagged.is(x0));
__ IncrementCounter(
- masm->isolate()->counters()->math_pow(), 1, scratch0, scratch1);
+ isolate()->counters()->math_pow(), 1, scratch0, scratch1);
__ Ret();
} else {
AllowExternalCallThatCantCauseGC scope(masm);
__ Fmov(base_double, base_double_copy);
__ Scvtf(exponent_double, exponent_integer);
__ CallCFunction(
- ExternalReference::power_double_double_function(masm->isolate()),
+ ExternalReference::power_double_double_function(isolate()),
0, 2);
__ Mov(lr, saved_lr);
__ Bind(&done);
__ IncrementCounter(
- masm->isolate()->counters()->math_pow(), 1, scratch0, scratch1);
+ isolate()->counters()->math_pow(), 1, scratch0, scratch1);
__ Ret();
}
}
void StoreRegistersStateStub::GenerateAheadOfTime(Isolate* isolate) {
- StoreRegistersStateStub stub1(kDontSaveFPRegs);
+ StoreRegistersStateStub stub1(isolate, kDontSaveFPRegs);
stub1.GetCode(isolate);
- StoreRegistersStateStub stub2(kSaveFPRegs);
+ StoreRegistersStateStub stub2(isolate, kSaveFPRegs);
stub2.GetCode(isolate);
}
void RestoreRegistersStateStub::GenerateAheadOfTime(Isolate* isolate) {
- RestoreRegistersStateStub stub1(kDontSaveFPRegs);
+ RestoreRegistersStateStub stub1(isolate, kDontSaveFPRegs);
stub1.GetCode(isolate);
- RestoreRegistersStateStub stub2(kSaveFPRegs);
+ RestoreRegistersStateStub stub2(isolate, kSaveFPRegs);
stub2.GetCode(isolate);
}
void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
- CEntryStub stub(1, kDontSaveFPRegs);
+ CEntryStub stub(isolate, 1, kDontSaveFPRegs);
stub.GetCode(isolate);
- CEntryStub stub_fp(1, kSaveFPRegs);
+ CEntryStub stub_fp(isolate, 1, kSaveFPRegs);
stub_fp.GetCode(isolate);
}
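// The arm64 port gets the same mechanical treatment as ARM; note that its
// CEntryStub::GenerateAheadOfTime also pre-compiles a kSaveFPRegs variant,
// which the ARM version above does not.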
ASSERT(csp.Is(__ StackPointer()));
- Isolate* isolate = masm->isolate();
-
// Prepare AAPCS64 arguments to pass to the builtin.
__ Mov(x0, argc);
__ Mov(x1, argv);
- __ Mov(x2, ExternalReference::isolate_address(isolate));
+ __ Mov(x2, ExternalReference::isolate_address(isolate()));
Label return_location;
__ Adr(x12, &return_location);
// Retrieve the pending exception.
ExternalReference pending_exception_address(
- Isolate::kPendingExceptionAddress, isolate);
+ Isolate::kPendingExceptionAddress, isolate());
const Register& exception = result;
const Register& exception_address = x11;
__ Mov(exception_address, Operand(pending_exception_address));
__ Ldr(exception, MemOperand(exception_address));
// Clear the pending exception.
- __ Mov(x10, Operand(isolate->factory()->the_hole_value()));
+ __ Mov(x10, Operand(isolate()->factory()->the_hole_value()));
__ Str(x10, MemOperand(exception_address));
// x0 exception The exception descriptor.
// Special handling of termination exceptions, which are uncatchable by
// JavaScript code.
Label throw_termination_exception;
- __ Cmp(exception, Operand(isolate->factory()->termination_exception()));
+ __ Cmp(exception, Operand(isolate()->factory()->termination_exception()));
__ B(eq, &throw_termination_exception);
// We didn't execute a return case, so the stack frame hasn't been updated
__ Fmov(fp_zero, 0.0);
// Build an entry frame (see layout below).
- Isolate* isolate = masm->isolate();
-
- // Build an entry frame.
int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
int64_t bad_frame_pointer = -1L; // Bad frame pointer to fail if it is used.
__ Mov(x13, bad_frame_pointer);
__ Mov(x12, Smi::FromInt(marker));
- __ Mov(x11, ExternalReference(Isolate::kCEntryFPAddress, isolate));
+ __ Mov(x11, ExternalReference(Isolate::kCEntryFPAddress, isolate()));
__ Ldr(x10, MemOperand(x11));
__ Push(x13, xzr, x12, x10);
// Push the JS entry frame marker. Also set js_entry_sp if this is the
// outermost JS call.
Label non_outermost_js, done;
- ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate);
+ ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate());
__ Mov(x10, ExternalReference(js_entry_sp));
__ Ldr(x11, MemOperand(x10));
__ Cbnz(x11, &non_outermost_js);
// fp will be invalid because the PushTryHandler below sets it to 0 to
// signal the existence of the JSEntry frame.
__ Mov(x10, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
- isolate)));
+ isolate())));
}
__ Str(code_entry, MemOperand(x10));
__ LoadRoot(x0, Heap::kExceptionRootIndex);
// saved values before returning a failure to C.
// Clear any pending exceptions.
- __ Mov(x10, Operand(isolate->factory()->the_hole_value()));
+ __ Mov(x10, Operand(isolate()->factory()->the_hole_value()));
__ Mov(x11, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
- isolate)));
+ isolate())));
__ Str(x10, MemOperand(x11));
// Invoke the function by calling through the JS entry trampoline builtin.
// x4: argv.
ExternalReference entry(is_construct ? Builtins::kJSConstructEntryTrampoline
: Builtins::kJSEntryTrampoline,
- isolate);
+ isolate());
__ Mov(x10, entry);
// Call the JSEntryTrampoline.
// Restore the top frame descriptors from the stack.
__ Pop(x10);
- __ Mov(x11, ExternalReference(Isolate::kCEntryFPAddress, isolate));
+ __ Mov(x11, ExternalReference(Isolate::kCEntryFPAddress, isolate()));
__ Str(x10, MemOperand(x11));
// Reset the stack to the callee saved registers.
// -----------------------------------
Register key = x0;
receiver = x1;
- __ Cmp(key, Operand(masm->isolate()->factory()->prototype_string()));
+ __ Cmp(key, Operand(isolate()->factory()->prototype_string()));
__ B(ne, &miss);
} else {
ASSERT(kind() == Code::LOAD_IC);
__ Mov(result, res_false);
// Null is not instance of anything.
- __ Cmp(object_type, Operand(masm->isolate()->factory()->null_value()));
+ __ Cmp(object_type, Operand(isolate()->factory()->null_value()));
__ B(ne, &object_not_null);
__ Ret();
const int kJSRegExpOffset = 7 * kPointerSize;
// Ensure that a RegExp stack is allocated.
- Isolate* isolate = masm->isolate();
ExternalReference address_of_regexp_stack_memory_address =
- ExternalReference::address_of_regexp_stack_memory_address(isolate);
+ ExternalReference::address_of_regexp_stack_memory_address(isolate());
ExternalReference address_of_regexp_stack_memory_size =
- ExternalReference::address_of_regexp_stack_memory_size(isolate);
+ ExternalReference::address_of_regexp_stack_memory_size(isolate());
__ Mov(x10, address_of_regexp_stack_memory_size);
__ Ldr(x10, MemOperand(x10));
__ Cbz(x10, &runtime);
__ JumpIfSmi(code_object, &runtime);
// All checks done. Now push arguments for native regexp code.
- __ IncrementCounter(isolate->counters()->regexp_entry_native(), 1,
+ __ IncrementCounter(isolate()->counters()->regexp_entry_native(), 1,
x10,
x11);
// csp[0]: Space for the return address placed by DirectCEntryStub.
// csp[8]: Argument 9, the current isolate address.
- __ Mov(x10, ExternalReference::isolate_address(isolate));
+ __ Mov(x10, ExternalReference::isolate_address(isolate()));
__ Poke(x10, kPointerSize);
Register length = w11;
__ Add(x3, x2, Operand(w10, UXTW));
// Argument 5 (x4): static offsets vector buffer.
- __ Mov(x4, ExternalReference::address_of_static_offsets_vector(isolate));
+ __ Mov(x4, ExternalReference::address_of_static_offsets_vector(isolate()));
// Argument 6 (x5): Set the number of capture registers to zero to force
// global regexps to behave as non-global. This stub is not used for global
// Locate the code entry and call it.
__ Add(code_object, code_object, Code::kHeaderSize - kHeapObjectTag);
- DirectCEntryStub stub;
+ DirectCEntryStub stub(isolate());
stub.GenerateCall(masm, code_object);
__ LeaveExitFrame(false, x10, true);
// Get the static offsets vector filled by the native regexp code
// and fill the last match info.
ExternalReference address_of_static_offsets_vector =
- ExternalReference::address_of_static_offsets_vector(isolate);
+ ExternalReference::address_of_static_offsets_vector(isolate());
__ Mov(offsets_vector_index, address_of_static_offsets_vector);
Label next_capture, done;
// A stack overflow (on the backtrack stack) may have occurred
// in the RegExp code but no exception has been created yet.
// If there is no pending exception, handle that in the runtime system.
- __ Mov(x10, Operand(isolate->factory()->the_hole_value()));
+ __ Mov(x10, Operand(isolate()->factory()->the_hole_value()));
__ Mov(x11,
Operand(ExternalReference(Isolate::kPendingExceptionAddress,
- isolate)));
+ isolate())));
__ Ldr(exception_value, MemOperand(x11));
__ Cmp(x10, exception_value);
__ B(eq, &runtime);
__ ThrowUncatchable(exception_value, x10, x11, x12, x13);
__ Bind(&failure);
- __ Mov(x0, Operand(masm->isolate()->factory()->null_value()));
+ __ Mov(x0, Operand(isolate()->factory()->null_value()));
__ PopCPURegList(used_callee_saved_registers);
// Drop the 4 arguments of the stub from the stack.
__ Drop(4);
// slot.
{
FrameScope scope(masm, StackFrame::INTERNAL);
- CreateAllocationSiteStub create_stub;
+ CreateAllocationSiteStub create_stub(masm->isolate());
// Arguments register must be smi-tagged to call out.
__ SmiTag(argc);
// If there is a call target cache, mark it megamorphic in the
// non-function case. MegamorphicSentinel is an immortal immovable object
// (megamorphic symbol) so no write barrier is needed.
- ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()),
- masm->isolate()->heap()->megamorphic_symbol());
+ ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(isolate()),
+ isolate()->heap()->megamorphic_symbol());
__ Add(x12, cache_cell, Operand::UntagSmiAndScale(slot,
kPointerSizeLog2));
__ LoadRoot(x11, Heap::kMegamorphicSymbolRootIndex);
__ GetBuiltinFunction(x1, Builtins::CALL_FUNCTION_PROXY);
{
Handle<Code> adaptor =
- masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
+ isolate()->builtins()->ArgumentsAdaptorTrampoline();
__ Jump(adaptor, RelocInfo::CODE_TARGET);
}
__ Mov(x0, argc_); // Set up the number of arguments.
__ Mov(x2, 0);
__ GetBuiltinFunction(function, Builtins::CALL_NON_FUNCTION);
- __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
+ __ Jump(isolate()->builtins()->ArgumentsAdaptorTrampoline(),
RelocInfo::CODE_TARGET);
}
__ Bind(&do_call);
// Set expected number of arguments to zero (not changing x0).
__ Mov(x2, 0);
- __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
+ __ Jump(isolate()->builtins()->ArgumentsAdaptorTrampoline(),
RelocInfo::CODE_TARGET);
}
__ Ret();
__ Bind(&unordered);
- ICCompareStub stub(op_, CompareIC::GENERIC, CompareIC::GENERIC,
+ ICCompareStub stub(isolate(), op_, CompareIC::GENERIC, CompareIC::GENERIC,
CompareIC::GENERIC);
- __ Jump(stub.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
+ __ Jump(stub.GetCode(isolate()), RelocInfo::CODE_TARGET);
__ Bind(&maybe_undefined1);
if (Token::IsOrderedRelationalCompareOp(op_)) {
Register stub_entry = x11;
{
ExternalReference miss =
- ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate());
+ ExternalReference(IC_Utility(IC::kCompareIC_Miss), isolate());
FrameScope scope(masm, StackFrame::INTERNAL);
Register op = x10;
__ CopyBytes(result_char0, substring_char0, result_length, x3, kCopyLong);
__ Bind(&return_x0);
- Counters* counters = masm->isolate()->counters();
+ Counters* counters = isolate()->counters();
__ IncrementCounter(counters->sub_string_native(), 1, x3, x4);
__ Drop(3);
__ Ret();
void StringCompareStub::Generate(MacroAssembler* masm) {
Label runtime;
- Counters* counters = masm->isolate()->counters();
+ Counters* counters = isolate()->counters();
// Stack frame on entry.
// sp[0]: right string
// -- x0 : right
// -- lr : return address
// -----------------------------------
- Isolate* isolate = masm->isolate();
// Load x2 with the allocation site. We stick an undefined dummy value here
// and replace it with the real allocation site later when we instantiate this
// stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
- __ LoadObject(x2, handle(isolate->heap()->undefined_value()));
+ __ LoadObject(x2, handle(isolate()->heap()->undefined_value()));
// Make sure that we actually patched the allocation site.
if (FLAG_debug_code) {
// Tail call into the stub that handles binary operations with allocation
// sites.
- BinaryOpWithAllocationSiteStub stub(state_);
+ BinaryOpWithAllocationSiteStub stub(isolate(), state_);
__ TailCallStub(&stub);
}
__ Mov(address, regs_.address());
__ Mov(x0, regs_.object());
__ Mov(x1, address);
- __ Mov(x2, ExternalReference::isolate_address(masm->isolate()));
+ __ Mov(x2, ExternalReference::isolate_address(isolate()));
AllowExternalCallThatCantCauseGC scope(masm);
ExternalReference function =
ExternalReference::incremental_marking_record_write_function(
- masm->isolate());
+ isolate());
__ CallCFunction(function, 3, 0);
regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_);
void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
- CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
- __ Call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
+ CEntryStub ces(isolate(), 1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
+ __ Call(ces.GetCode(isolate()), RelocInfo::CODE_TARGET);
int parameter_count_offset =
StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
__ Ldr(x1, MemOperand(fp, parameter_count_offset));
void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
if (masm->isolate()->function_entry_hook() != NULL) {
- ProfileEntryHookStub stub;
+ ProfileEntryHookStub stub(masm->isolate());
Assembler::BlockConstPoolScope no_const_pools(masm);
Label entry_hook_call_start;
__ Bind(&entry_hook_call_start);
#if V8_HOST_ARCH_ARM64
uintptr_t entry_hook =
- reinterpret_cast<uintptr_t>(masm->isolate()->function_entry_hook());
+ reinterpret_cast<uintptr_t>(isolate()->function_entry_hook());
__ Mov(x10, entry_hook);
#else
// Under the simulator we need to indirect the entry hook through a trampoline
ApiFunction dispatcher(FUNCTION_ADDR(EntryHookTrampoline));
__ Mov(x10, Operand(ExternalReference(&dispatcher,
ExternalReference::BUILTIN_CALL,
- masm->isolate())));
+ isolate())));
// It additionally takes an isolate as a third parameter
- __ Mov(x2, ExternalReference::isolate_address(masm->isolate()));
+ __ Mov(x2, ExternalReference::isolate_address(isolate()));
#endif
// The caller's return address is above the saved temporaries.
ASSERT(csp.Is(__ StackPointer()));
intptr_t code =
- reinterpret_cast<intptr_t>(GetCode(masm->isolate()).location());
+ reinterpret_cast<intptr_t>(GetCode(isolate()).location());
__ Mov(lr, Operand(code, RelocInfo::CODE_TARGET));
__ Mov(x10, target);
// Branch to the stub.
}
Label not_found;
- NameDictionaryLookupStub stub(POSITIVE_LOOKUP);
+ NameDictionaryLookupStub stub(masm->isolate(), POSITIVE_LOOKUP);
__ CallStub(&stub);
__ Cbz(x0, &not_found);
__ Mov(scratch2, x2); // Move entry index into scratch2.
__ Ldr(x0, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
__ Mov(x1, Operand(name));
- NameDictionaryLookupStub stub(NEGATIVE_LOOKUP);
+ NameDictionaryLookupStub stub(masm->isolate(), NEGATIVE_LOOKUP);
__ CallStub(&stub);
// Move stub return value to scratch0. Note that scratch0 is not included in
// spill_list and won't be clobbered by PopCPURegList.
AllocationSiteOverrideMode mode) {
ASM_LOCATION("CreateArrayDispatch");
if (mode == DISABLE_ALLOCATION_SITES) {
- T stub(GetInitialFastElementsKind(), mode);
+ T stub(masm->isolate(), GetInitialFastElementsKind(), mode);
__ TailCallStub(&stub);
} else if (mode == DONT_OVERRIDE) {
// TODO(jbramley): Is this the best way to handle this? Can we make the
// tail calls conditional, rather than hopping over each one?
__ CompareAndBranch(kind, candidate_kind, ne, &next);
- T stub(candidate_kind);
+ T stub(masm->isolate(), candidate_kind);
__ TailCallStub(&stub);
__ Bind(&next);
}
ElementsKind initial = GetInitialFastElementsKind();
ElementsKind holey_initial = GetHoleyElementsKind(initial);
- ArraySingleArgumentConstructorStub stub_holey(holey_initial,
+ ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
+ holey_initial,
DISABLE_ALLOCATION_SITES);
__ TailCallStub(&stub_holey);
__ Bind(&normal_sequence);
- ArraySingleArgumentConstructorStub stub(initial,
+ ArraySingleArgumentConstructorStub stub(masm->isolate(),
+ initial,
DISABLE_ALLOCATION_SITES);
__ TailCallStub(&stub);
} else if (mode == DONT_OVERRIDE) {
Label next;
ElementsKind candidate_kind = GetFastElementsKindFromSequenceIndex(i);
__ CompareAndBranch(kind, candidate_kind, ne, &next);
- ArraySingleArgumentConstructorStub stub(candidate_kind);
+ ArraySingleArgumentConstructorStub stub(masm->isolate(), candidate_kind);
__ TailCallStub(&stub);
__ Bind(&next);
}
TERMINAL_FAST_ELEMENTS_KIND);
for (int i = 0; i <= to_index; ++i) {
ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
- T stub(kind);
+ T stub(isolate, kind);
stub.GetCode(isolate);
if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) {
- T stub1(kind, DISABLE_ALLOCATION_SITES);
+ T stub1(isolate, kind, DISABLE_ALLOCATION_SITES);
stub1.GetCode(isolate);
}
}
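The loop above is the ahead-of-time generation pattern that recurs throughout the patch; reduced to a self-contained sketch (T and GenerateVariantsAheadOfTime are stand-ins, not names from this diff):

// Hedged reduction of the GenerateStubsAheadOfTime loops in this patch.
class Isolate;

// T is any stub type following the new convention: the Isolate* becomes the
// first constructor argument, and GetCode() still takes it explicitly.
template <typename T>
void GenerateVariantsAheadOfTime(Isolate* isolate, int num_kinds) {
  for (int kind = 0; kind < num_kinds; ++kind) {
    T stub(isolate, kind);   // isolate threaded through the constructor
    stub.GetCode(isolate);   // and still passed to code generation
  }
}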
ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
for (int i = 0; i < 2; i++) {
// For internal arrays we only need a few things
- InternalArrayNoArgumentConstructorStub stubh1(kinds[i]);
+ InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]);
stubh1.GetCode(isolate);
- InternalArraySingleArgumentConstructorStub stubh2(kinds[i]);
+ InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
stubh2.GetCode(isolate);
- InternalArrayNArgumentsConstructorStub stubh3(kinds[i]);
+ InternalArrayNArgumentsConstructorStub stubh3(isolate, kinds[i]);
stubh3.GetCode(isolate);
}
}
__ Cbz(x10, &packed_case);
InternalArraySingleArgumentConstructorStub
- stub1_holey(GetHoleyElementsKind(kind));
+ stub1_holey(isolate(), GetHoleyElementsKind(kind));
__ TailCallStub(&stub1_holey);
__ Bind(&packed_case);
}
- InternalArraySingleArgumentConstructorStub stub1(kind);
+ InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
__ TailCallStub(&stub1);
__ Bind(&zero_case);
// No arguments.
- InternalArrayNoArgumentConstructorStub stub0(kind);
+ InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
__ TailCallStub(&stub0);
__ Bind(&n_case);
// N arguments.
- InternalArrayNArgumentsConstructorStub stubN(kind);
+ InternalArrayNArgumentsConstructorStub stubN(isolate(), kind);
__ TailCallStub(&stubN);
}
// -- sp[0] : return address
// -- sp[4] : last argument
// -----------------------------------
- Handle<Object> undefined_sentinel(
- masm->isolate()->heap()->undefined_value(), masm->isolate());
Register constructor = x1;
STATIC_ASSERT(FCA::kHolderIndex == 0);
STATIC_ASSERT(FCA::kArgsLength == 7);
- Isolate* isolate = masm->isolate();
-
// FunctionCallbackArguments: context, callee and call data.
__ Push(context, callee, call_data);
__ LoadRoot(call_data, Heap::kUndefinedValueRootIndex);
}
Register isolate_reg = x5;
- __ Mov(isolate_reg, ExternalReference::isolate_address(isolate));
+ __ Mov(isolate_reg, ExternalReference::isolate_address(isolate()));
// FunctionCallbackArguments:
// return value, return value default, isolate, holder.
ExternalReference::Type thunk_type = ExternalReference::PROFILING_API_CALL;
ApiFunction thunk_fun(thunk_address);
ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type,
- masm->isolate());
+ isolate());
AllowExternalCallThatCantCauseGC scope(masm);
MemOperand context_restore_operand(
ExternalReference::PROFILING_GETTER_CALL;
ApiFunction thunk_fun(thunk_address);
ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type,
- masm->isolate());
+ isolate());
const int spill_offset = 1 + kApiStackSpace;
__ CallApiFunctionAndReturn(api_function_address,
class StoreBufferOverflowStub: public PlatformCodeStub {
public:
- explicit StoreBufferOverflowStub(SaveFPRegsMode save_fp)
- : save_doubles_(save_fp) { }
+ StoreBufferOverflowStub(Isolate* isolate, SaveFPRegsMode save_fp)
+ : PlatformCodeStub(isolate), save_doubles_(save_fp) { }
void Generate(MacroAssembler* masm);
class StoreRegistersStateStub: public PlatformCodeStub {
public:
- explicit StoreRegistersStateStub(SaveFPRegsMode with_fp)
- : save_doubles_(with_fp) {}
+ StoreRegistersStateStub(Isolate* isolate, SaveFPRegsMode with_fp)
+ : PlatformCodeStub(isolate), save_doubles_(with_fp) {}
static Register to_be_pushed_lr() { return ip0; }
static void GenerateAheadOfTime(Isolate* isolate);
class RestoreRegistersStateStub: public PlatformCodeStub {
public:
- explicit RestoreRegistersStateStub(SaveFPRegsMode with_fp)
- : save_doubles_(with_fp) {}
+ RestoreRegistersStateStub(Isolate* isolate, SaveFPRegsMode with_fp)
+ : PlatformCodeStub(isolate), save_doubles_(with_fp) {}
static void GenerateAheadOfTime(Isolate* isolate);
private:
// Stub to record the write of 'value' at 'address' in 'object'.
// Typically 'address' = 'object' + <some offset>.
// See MacroAssembler::RecordWriteField() for example.
- RecordWriteStub(Register object,
+ RecordWriteStub(Isolate* isolate,
+ Register object,
Register value,
Register address,
RememberedSetAction remembered_set_action,
SaveFPRegsMode fp_mode)
- : object_(object),
+ : PlatformCodeStub(isolate),
+ object_(object),
value_(value),
address_(address),
remembered_set_action_(remembered_set_action),
// the exit frame before doing the call with GenerateCall.
class DirectCEntryStub: public PlatformCodeStub {
public:
- DirectCEntryStub() {}
+ explicit DirectCEntryStub(Isolate* isolate) : PlatformCodeStub(isolate) {}
void Generate(MacroAssembler* masm);
void GenerateCall(MacroAssembler* masm, Register target);
public:
enum LookupMode { POSITIVE_LOOKUP, NEGATIVE_LOOKUP };
- explicit NameDictionaryLookupStub(LookupMode mode) : mode_(mode) { }
+ NameDictionaryLookupStub(Isolate* isolate, LookupMode mode)
+ : PlatformCodeStub(isolate), mode_(mode) { }
void Generate(MacroAssembler* masm);
class SubStringStub: public PlatformCodeStub {
public:
- SubStringStub() {}
+ explicit SubStringStub(Isolate* isolate) : PlatformCodeStub(isolate) {}
private:
Major MajorKey() { return SubString; }
class StringCompareStub: public PlatformCodeStub {
public:
- StringCompareStub() { }
+ explicit StringCompareStub(Isolate* isolate) : PlatformCodeStub(isolate) { }
// Compares two flat ASCII strings and returns result in x0.
static void GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
__ Mov(x0, 0); // No arguments.
__ Mov(x1, ExternalReference::debug_break(masm->isolate()));
- CEntryStub stub(1);
+ CEntryStub stub(masm->isolate(), 1);
__ CallStub(&stub);
// Restore the register values from the expression stack.
__ Push(x1, x10);
__ CallRuntime(Runtime::kHiddenNewGlobalContext, 2);
} else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
- FastNewContextStub stub(heap_slots);
+ FastNewContextStub stub(isolate(), heap_slots);
__ CallStub(&stub);
} else {
__ Push(x1);
} else {
type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
}
- ArgumentsAccessStub stub(type);
+ ArgumentsAccessStub stub(isolate(), type);
__ CallStub(&stub);
SetVar(arguments, x0, x1, x2);
!pretenure &&
scope()->is_function_scope() &&
info->num_literals() == 0) {
- FastNewClosureStub stub(info->strict_mode(), info->is_generator());
+ FastNewClosureStub stub(isolate(),
+ info->strict_mode(),
+ info->is_generator());
__ Mov(x2, Operand(info));
__ CallStub(&stub);
} else {
__ Push(x3, x2, x1, x0);
__ CallRuntime(Runtime::kHiddenCreateObjectLiteral, 4);
} else {
- FastCloneShallowObjectStub stub(properties_count);
+ FastCloneShallowObjectStub stub(isolate(), properties_count);
__ CallStub(&stub);
}
if (has_fast_elements && constant_elements_values->map() ==
isolate()->heap()->fixed_cow_array_map()) {
FastCloneShallowArrayStub stub(
+ isolate(),
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
allocation_site_mode,
length);
mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
}
- FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
+ FastCloneShallowArrayStub stub(isolate(),
+ mode,
+ allocation_site_mode,
+ length);
__ CallStub(&stub);
}
EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
} else {
__ Mov(x3, Smi::FromInt(i));
- StoreArrayLiteralElementStub stub;
+ StoreArrayLiteralElementStub stub(isolate());
__ CallStub(&stub);
}
patch_site.EmitJumpIfSmi(x10, &both_smis);
__ Bind(&stub_call);
- BinaryOpICStub stub(op, mode);
+ BinaryOpICStub stub(isolate(), op, mode);
{
Assembler::BlockPoolsScope scope(masm_);
CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
Token::Value op,
OverwriteMode mode) {
__ Pop(x1);
- BinaryOpICStub stub(op, mode);
+ BinaryOpICStub stub(isolate(), op, mode);
JumpPatchSite patch_site(masm_); // Unbound, signals no inlined smi code.
{
Assembler::BlockPoolsScope scope(masm_);
// Record source position for debugger.
SetSourcePosition(expr->position());
- CallFunctionStub stub(arg_count, flags);
+ CallFunctionStub stub(isolate(), arg_count, flags);
__ Peek(x1, (arg_count + 1) * kPointerSize);
__ CallStub(&stub);
// Record source position for debugger.
SetSourcePosition(expr->position());
- CallFunctionStub stub(arg_count, CALL_AS_METHOD);
+ CallFunctionStub stub(isolate(), arg_count, CALL_AS_METHOD);
__ Peek(x1, (arg_count + 1) * kPointerSize);
__ CallStub(&stub);
__ Mov(x3, Smi::FromInt(expr->CallFeedbackSlot()));
// Record call targets in unoptimized code.
- CallFunctionStub stub(arg_count, RECORD_CALL_TARGET);
+ CallFunctionStub stub(isolate(), arg_count, RECORD_CALL_TARGET);
__ Peek(x1, (arg_count + 1) * kXRegSize);
__ CallStub(&stub);
RecordJSReturnSite(expr);
SetSourcePosition(expr->position());
// Call the evaluated function.
- CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS);
+ CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
__ Peek(x1, (arg_count + 1) * kXRegSize);
__ CallStub(&stub);
RecordJSReturnSite(expr);
__ LoadObject(x2, FeedbackVector());
__ Mov(x3, Smi::FromInt(expr->CallNewFeedbackSlot()));
- CallConstructStub stub(RECORD_CALL_TARGET);
+ CallConstructStub stub(isolate(), RECORD_CALL_TARGET);
__ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL);
PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
context()->Plug(x0);
VisitForAccumulatorValue(args->at(0));
__ Mov(x1, x0);
__ Mov(x0, Smi::FromInt(info_->scope()->num_parameters()));
- ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
+ ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
__ CallStub(&stub);
context()->Plug(x0);
}
void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
// Load the arguments on the stack and call the stub.
- SubStringStub stub;
+ SubStringStub stub(isolate());
ZoneList<Expression*>* args = expr->arguments();
ASSERT(args->length() == 3);
VisitForStackValue(args->at(0));
void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
// Load the arguments on the stack and call the stub.
- RegExpExecStub stub;
+ RegExpExecStub stub(isolate());
ZoneList<Expression*>* args = expr->arguments();
ASSERT(args->length() == 4);
VisitForStackValue(args->at(0));
ASSERT(args->length() == 2);
VisitForStackValue(args->at(0));
VisitForStackValue(args->at(1));
- MathPowStub stub(MathPowStub::ON_STACK);
+ MathPowStub stub(isolate(), MathPowStub::ON_STACK);
__ CallStub(&stub);
context()->Plug(x0);
}
// Load the argument into x0 and call the stub.
VisitForAccumulatorValue(args->at(0));
- NumberToStringStub stub;
+ NumberToStringStub stub(isolate());
__ CallStub(&stub);
context()->Plug(x0);
}
VisitForAccumulatorValue(args->at(1));
__ Pop(x1);
- StringAddStub stub(STRING_ADD_CHECK_BOTH, NOT_TENURED);
+ StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
__ CallStub(&stub);
context()->Plug(x0);
VisitForStackValue(args->at(0));
VisitForStackValue(args->at(1));
- StringCompareStub stub;
+ StringCompareStub stub(isolate());
__ CallStub(&stub);
context()->Plug(x0);
}
void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
- RegExpConstructResultStub stub;
+ RegExpConstructResultStub stub(isolate());
ZoneList<Expression*>* args = expr->arguments();
ASSERT(args->length() == 3);
VisitForStackValue(args->at(0));
// Record source position of the IC call.
SetSourcePosition(expr->position());
- CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS);
+ CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
__ Peek(x1, (arg_count + 1) * kPointerSize);
__ CallStub(&stub);
__ B(&stub_call);
__ Bind(&slow);
}
- ToNumberStub convert_stub;
+ ToNumberStub convert_stub(isolate());
__ CallStub(&convert_stub);
// Save result for postfix expressions.
{
Assembler::BlockPoolsScope scope(masm_);
- BinaryOpICStub stub(Token::ADD, NO_OVERWRITE);
+ BinaryOpICStub stub(isolate(), Token::ADD, NO_OVERWRITE);
CallIC(stub.GetCode(isolate()), expr->CountBinOpFeedbackId());
patch_site.EmitPatchInfo();
}
case Token::INSTANCEOF: {
VisitForStackValue(expr->right());
- InstanceofStub stub(InstanceofStub::kNoFlags);
+ InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
__ CallStub(&stub);
PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
// The stub returns 0 for true.
CallIC(ic, TypeFeedbackId::None());
__ Mov(x1, x0);
__ Poke(x1, 2 * kPointerSize);
- CallFunctionStub stub(1, CALL_AS_METHOD);
+ CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
__ CallStub(&stub);
__ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
ASSERT(ToRegister(instr->result()).Is(x0));
int arity = instr->arity();
- CallFunctionStub stub(arity, instr->hydrogen()->function_flags());
+ CallFunctionStub stub(isolate(), arity, instr->hydrogen()->function_flags());
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
}
// No cell in x2 for construct type feedback in optimized code.
__ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
- CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
+ CallConstructStub stub(isolate(), NO_CALL_FUNCTION_FLAGS);
CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
ASSERT(ToRegister(instr->result()).is(x0));
: DONT_OVERRIDE;
if (instr->arity() == 0) {
- ArrayNoArgumentConstructorStub stub(kind, override_mode);
+ ArrayNoArgumentConstructorStub stub(isolate(), kind, override_mode);
CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
} else if (instr->arity() == 1) {
Label done;
__ Cbz(x10, &packed_case);
ElementsKind holey_kind = GetHoleyElementsKind(kind);
- ArraySingleArgumentConstructorStub stub(holey_kind, override_mode);
+ ArraySingleArgumentConstructorStub stub(isolate(),
+ holey_kind,
+ override_mode);
CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
__ B(&done);
__ Bind(&packed_case);
}
- ArraySingleArgumentConstructorStub stub(kind, override_mode);
+ ArraySingleArgumentConstructorStub stub(isolate(), kind, override_mode);
CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
__ Bind(&done);
} else {
- ArrayNArgumentsConstructorStub stub(kind, override_mode);
+ ArrayNArgumentsConstructorStub stub(isolate(), kind, override_mode);
CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
}
Comment(";;; Allocate local context");
// Argument to NewContext is the function, which is in x1.
if (heap_slots <= FastNewContextStub::kMaximumSlots) {
- FastNewContextStub stub(heap_slots);
+ FastNewContextStub stub(isolate(), heap_slots);
__ CallStub(&stub);
} else {
__ Push(x1);
ASSERT(ToRegister(instr->right()).is(x0));
ASSERT(ToRegister(instr->result()).is(x0));
- BinaryOpICStub stub(instr->op(), NO_OVERWRITE);
+ BinaryOpICStub stub(isolate(), instr->op(), NO_OVERWRITE);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
}
ASSERT(ToRegister(instr->result()).is(x0));
switch (instr->hydrogen()->major_key()) {
case CodeStub::RegExpExec: {
- RegExpExecStub stub;
+ RegExpExecStub stub(isolate());
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
break;
}
case CodeStub::SubString: {
- SubStringStub stub;
+ SubStringStub stub(isolate());
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
break;
}
case CodeStub::StringCompare: {
- StringCompareStub stub;
+ StringCompareStub stub(isolate());
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
break;
}
// space for nested functions that don't need literals cloning.
bool pretenure = instr->hydrogen()->pretenure();
if (!pretenure && instr->hydrogen()->has_no_literals()) {
- FastNewClosureStub stub(instr->hydrogen()->strict_mode(),
+ FastNewClosureStub stub(isolate(),
+ instr->hydrogen()->strict_mode(),
instr->hydrogen()->is_generator());
__ Mov(x2, Operand(instr->hydrogen()->shared_info()));
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
ASSERT(ToRegister(instr->left()).Is(InstanceofStub::left()));
ASSERT(ToRegister(instr->right()).Is(InstanceofStub::right()));
- InstanceofStub stub(InstanceofStub::kArgsInRegisters);
+ InstanceofStub stub(isolate(), InstanceofStub::kArgsInRegisters);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
// InstanceofStub returns a result in x0:
ASSERT(ToRegister(instr->value()).Is(InstanceofStub::left()));
__ LoadObject(InstanceofStub::right(), instr->function());
- InstanceofStub stub(flags);
+ InstanceofStub stub(isolate(), flags);
CallCodeGeneric(stub.GetCode(isolate()),
RelocInfo::CODE_TARGET,
instr,
ASSERT(ToDoubleRegister(instr->result()).is(d0));
if (exponent_type.IsSmi()) {
- MathPowStub stub(MathPowStub::TAGGED);
+ MathPowStub stub(isolate(), MathPowStub::TAGGED);
__ CallStub(&stub);
} else if (exponent_type.IsTagged()) {
Label no_deopt;
DeoptimizeIfNotRoot(x0, Heap::kHeapNumberMapRootIndex,
instr->environment());
__ Bind(&no_deopt);
- MathPowStub stub(MathPowStub::TAGGED);
+ MathPowStub stub(isolate(), MathPowStub::TAGGED);
__ CallStub(&stub);
} else if (exponent_type.IsInteger32()) {
// Ensure integer exponent has no garbage in top 32-bits, as MathPowStub
// supports large integer exponents.
Register exponent = ToRegister(instr->right());
__ Sxtw(exponent, exponent);
- MathPowStub stub(MathPowStub::INTEGER);
+ MathPowStub stub(isolate(), MathPowStub::INTEGER);
__ CallStub(&stub);
} else {
ASSERT(exponent_type.IsDouble());
- MathPowStub stub(MathPowStub::DOUBLE);
+ MathPowStub stub(isolate(), MathPowStub::DOUBLE);
__ CallStub(&stub);
}
}
ASSERT(ToRegister(instr->context()).is(cp));
ASSERT(ToRegister(instr->left()).Is(x1));
ASSERT(ToRegister(instr->right()).Is(x0));
- StringAddStub stub(instr->hydrogen()->flags(),
+ StringAddStub stub(isolate(),
+ instr->hydrogen()->flags(),
instr->hydrogen()->pretenure_flag());
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
}
this, Safepoint::kWithRegistersAndDoubles);
__ Mov(x1, Operand(to_map));
bool is_js_array = from_map->instance_type() == JS_ARRAY_TYPE;
- TransitionElementsKindStub stub(from_kind, to_kind, is_js_array);
+ TransitionElementsKindStub stub(isolate(), from_kind, to_kind, is_js_array);
__ CallStub(&stub);
RecordSafepointWithRegistersAndDoubles(
instr->pointer_map(), 0, Safepoint::kLazyDeopt);
codegen_->masm_->Mov(to_be_pushed_lr, lr);
switch (codegen_->expected_safepoint_kind_) {
case Safepoint::kWithRegisters: {
- StoreRegistersStateStub stub(kDontSaveFPRegs);
+ StoreRegistersStateStub stub(codegen_->isolate(), kDontSaveFPRegs);
codegen_->masm_->CallStub(&stub);
break;
}
case Safepoint::kWithRegistersAndDoubles: {
- StoreRegistersStateStub stub(kSaveFPRegs);
+ StoreRegistersStateStub stub(codegen_->isolate(), kSaveFPRegs);
codegen_->masm_->CallStub(&stub);
break;
}
ASSERT((kind & Safepoint::kWithRegisters) != 0);
switch (kind) {
case Safepoint::kWithRegisters: {
- RestoreRegistersStateStub stub(kDontSaveFPRegs);
+ RestoreRegistersStateStub stub(codegen_->isolate(), kDontSaveFPRegs);
codegen_->masm_->CallStub(&stub);
break;
}
case Safepoint::kWithRegistersAndDoubles: {
- RestoreRegistersStateStub stub(kSaveFPRegs);
+ RestoreRegistersStateStub stub(codegen_->isolate(), kSaveFPRegs);
codegen_->masm_->CallStub(&stub);
break;
}
Mov(x0, num_arguments);
Mov(x1, ExternalReference(f, isolate()));
- CEntryStub stub(1, save_doubles);
+ CEntryStub stub(isolate(), 1, save_doubles);
CallStub(&stub);
}
// Native call returns to the DirectCEntry stub which redirects to the
// return address pushed on stack (could have moved after GC).
// DirectCEntry stub itself is generated early and never moves.
- DirectCEntryStub stub;
+ DirectCEntryStub stub(isolate());
stub.GenerateCall(this, x3);
if (FLAG_log_timer_events) {
Mov(x0, num_arguments);
Mov(x1, ext);
- CEntryStub stub(1);
+ CEntryStub stub(isolate(), 1);
CallStub(&stub);
}
void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin) {
Mov(x1, builtin);
- CEntryStub stub(1);
+ CEntryStub stub(isolate(), 1);
Jump(stub.GetCode(isolate()), RelocInfo::CODE_TARGET);
}
Push(lr);
Push(double_input); // Put input on stack.
- DoubleToIStub stub(jssp,
+ DoubleToIStub stub(isolate(),
+ jssp,
result,
0,
true, // is_truncating
// If we fell through then inline version didn't succeed - call stub instead.
Push(lr);
- DoubleToIStub stub(object,
+ DoubleToIStub stub(isolate(),
+ object,
result,
HeapNumber::kValueOffset - kHeapObjectTag,
true, // is_truncating
void MacroAssembler::DebugBreak() {
Mov(x0, 0);
Mov(x1, ExternalReference(Runtime::kDebugBreak, isolate()));
- CEntryStub ces(1);
+ CEntryStub ces(isolate(), 1);
ASSERT(AllowThisStubCall(&ces));
Call(ces.GetCode(isolate()), RelocInfo::DEBUG_BREAK);
}
Bind(&store_buffer_overflow);
Push(lr);
StoreBufferOverflowStub store_buffer_overflow_stub =
- StoreBufferOverflowStub(fp_mode);
+ StoreBufferOverflowStub(isolate(), fp_mode);
CallStub(&store_buffer_overflow_stub);
Pop(lr);
if (lr_status == kLRHasNotBeenSaved) {
Push(lr);
}
- RecordWriteStub stub(object, value, address, remembered_set_action, fp_mode);
+ RecordWriteStub stub(isolate(), object, value, address, remembered_set_action,
+ fp_mode);
CallStub(&stub);
if (lr_status == kLRHasNotBeenSaved) {
Pop(lr);
ExternalReference check_stack_guard_state =
ExternalReference::re_check_stack_guard_state(isolate());
__ Mov(scratch, check_stack_guard_state);
- DirectCEntryStub stub;
+ DirectCEntryStub stub(isolate());
stub.GenerateCall(masm_, scratch);
// The input string may have been moved in memory; we need to reload it.
__ Mov(api_function_address, ref);
// Jump to stub.
- CallApiFunctionStub stub(is_store, call_data_undefined, argc);
+ CallApiFunctionStub stub(isolate, is_store, call_data_undefined, argc);
__ TailCallStub(&stub);
}
Representation representation) {
__ Mov(receiver(), reg);
if (kind() == Code::LOAD_IC) {
- LoadFieldStub stub(field.is_inobject(holder),
+ LoadFieldStub stub(isolate(),
+ field.is_inobject(holder),
field.translate(holder),
representation);
GenerateTailCall(masm(), stub.GetCode(isolate()));
} else {
- KeyedLoadFieldStub stub(field.is_inobject(holder),
+ KeyedLoadFieldStub stub(isolate(),
+ field.is_inobject(holder),
field.translate(holder),
representation);
GenerateTailCall(masm(), stub.GetCode(isolate()));
ExternalReference ref = ExternalReference(&fun, type, isolate());
__ Mov(getter_address_reg, ref);
- CallApiGetterStub stub;
+ CallApiGetterStub stub(isolate());
__ TailCallStub(&stub);
}
// Generate the uninitialized versions of the stub.
for (int op = Token::BIT_OR; op <= Token::MOD; ++op) {
for (int mode = NO_OVERWRITE; mode <= OVERWRITE_RIGHT; ++mode) {
- BinaryOpICStub stub(static_cast<Token::Value>(op),
+ BinaryOpICStub stub(isolate,
+ static_cast<Token::Value>(op),
static_cast<OverwriteMode>(mode));
stub.GetCode(isolate);
}
// static
void BinaryOpICStub::GenerateAheadOfTime(Isolate* isolate,
const BinaryOpIC::State& state) {
- BinaryOpICStub stub(state);
+ BinaryOpICStub stub(isolate, state);
stub.GetCode(isolate);
}
void BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(
Isolate* isolate, const BinaryOpIC::State& state) {
if (state.CouldCreateAllocationMementos()) {
- BinaryOpICWithAllocationSiteStub stub(state);
+ BinaryOpICWithAllocationSiteStub stub(isolate, state);
stub.GetCode(isolate);
}
}
void CreateAllocationSiteStub::GenerateAheadOfTime(Isolate* isolate) {
- CreateAllocationSiteStub stub;
+ CreateAllocationSiteStub stub(isolate);
stub.GetCode(isolate);
}
void StubFailureTrampolineStub::GenerateAheadOfTime(Isolate* isolate) {
- StubFailureTrampolineStub stub1(NOT_JS_FUNCTION_STUB_MODE);
- StubFailureTrampolineStub stub2(JS_FUNCTION_STUB_MODE);
+ StubFailureTrampolineStub stub1(isolate, NOT_JS_FUNCTION_STUB_MODE);
+ StubFailureTrampolineStub stub2(isolate, JS_FUNCTION_STUB_MODE);
stub1.GetCode(isolate);
stub2.GetCode(isolate);
}
void ArrayConstructorStubBase::InstallDescriptors(Isolate* isolate) {
- ArrayNoArgumentConstructorStub stub1(GetInitialFastElementsKind());
+ ArrayNoArgumentConstructorStub stub1(isolate, GetInitialFastElementsKind());
InstallDescriptor(isolate, &stub1);
- ArraySingleArgumentConstructorStub stub2(GetInitialFastElementsKind());
+ ArraySingleArgumentConstructorStub stub2(isolate,
+ GetInitialFastElementsKind());
InstallDescriptor(isolate, &stub2);
- ArrayNArgumentsConstructorStub stub3(GetInitialFastElementsKind());
+ ArrayNArgumentsConstructorStub stub3(isolate, GetInitialFastElementsKind());
InstallDescriptor(isolate, &stub3);
}
void NumberToStringStub::InstallDescriptors(Isolate* isolate) {
- NumberToStringStub stub;
+ NumberToStringStub stub(isolate);
InstallDescriptor(isolate, &stub);
}
void FastNewClosureStub::InstallDescriptors(Isolate* isolate) {
- FastNewClosureStub stub(STRICT, false);
+ FastNewClosureStub stub(isolate, STRICT, false);
InstallDescriptor(isolate, &stub);
}
void FastNewContextStub::InstallDescriptors(Isolate* isolate) {
- FastNewContextStub stub(FastNewContextStub::kMaximumSlots);
+ FastNewContextStub stub(isolate, FastNewContextStub::kMaximumSlots);
InstallDescriptor(isolate, &stub);
}
// static
void FastCloneShallowArrayStub::InstallDescriptors(Isolate* isolate) {
- FastCloneShallowArrayStub stub(FastCloneShallowArrayStub::CLONE_ELEMENTS,
+ FastCloneShallowArrayStub stub(isolate,
+ FastCloneShallowArrayStub::CLONE_ELEMENTS,
DONT_TRACK_ALLOCATION_SITE, 0);
InstallDescriptor(isolate, &stub);
}
// static
void BinaryOpICStub::InstallDescriptors(Isolate* isolate) {
- BinaryOpICStub stub(Token::ADD, NO_OVERWRITE);
+ BinaryOpICStub stub(isolate, Token::ADD, NO_OVERWRITE);
InstallDescriptor(isolate, &stub);
}
// static
void BinaryOpWithAllocationSiteStub::InstallDescriptors(Isolate* isolate) {
- BinaryOpWithAllocationSiteStub stub(Token::ADD, NO_OVERWRITE);
+ BinaryOpWithAllocationSiteStub stub(isolate, Token::ADD, NO_OVERWRITE);
InstallDescriptor(isolate, &stub);
}
// static
void StringAddStub::InstallDescriptors(Isolate* isolate) {
- StringAddStub stub(STRING_ADD_CHECK_NONE, NOT_TENURED);
+ StringAddStub stub(isolate, STRING_ADD_CHECK_NONE, NOT_TENURED);
InstallDescriptor(isolate, &stub);
}
// static
void RegExpConstructResultStub::InstallDescriptors(Isolate* isolate) {
- RegExpConstructResultStub stub;
+ RegExpConstructResultStub stub(isolate);
InstallDescriptor(isolate, &stub);
}
ArrayConstructorStub::ArrayConstructorStub(Isolate* isolate)
- : argument_count_(ANY) {
+ : PlatformCodeStub(isolate), argument_count_(ANY) {
ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
}
ArrayConstructorStub::ArrayConstructorStub(Isolate* isolate,
- int argument_count) {
+ int argument_count)
+ : PlatformCodeStub(isolate) {
if (argument_count == 0) {
argument_count_ = NONE;
} else if (argument_count == 1) {
void InternalArrayConstructorStubBase::InstallDescriptors(Isolate* isolate) {
- InternalArrayNoArgumentConstructorStub stub1(FAST_ELEMENTS);
+ InternalArrayNoArgumentConstructorStub stub1(isolate, FAST_ELEMENTS);
InstallDescriptor(isolate, &stub1);
- InternalArraySingleArgumentConstructorStub stub2(FAST_ELEMENTS);
+ InternalArraySingleArgumentConstructorStub stub2(isolate, FAST_ELEMENTS);
InstallDescriptor(isolate, &stub2);
- InternalArrayNArgumentsConstructorStub stub3(FAST_ELEMENTS);
+ InternalArrayNArgumentsConstructorStub stub3(isolate, FAST_ELEMENTS);
InstallDescriptor(isolate, &stub3);
}
InternalArrayConstructorStub::InternalArrayConstructorStub(
- Isolate* isolate) {
+ Isolate* isolate) : PlatformCodeStub(isolate) {
InternalArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
}
static const char* MajorName(Major major_key, bool allow_unknown_keys);
+ explicit CodeStub(Isolate* isolate) : isolate_(isolate) { }
virtual ~CodeStub() {}
static void GenerateStubsAheadOfTime(Isolate* isolate);
// Returns a name for logging/debugging purposes.
SmartArrayPointer<const char> GetName();
+ Isolate* isolate() const { return isolate_; }
+
protected:
static bool CanUseFPRegisters();
kStubMajorKeyBits, kStubMinorKeyBits> {}; // NOLINT
friend class BreakPointIterator;
+
+ Isolate* isolate_;
};
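For readers piecing the hierarchy together from fragments, the net effect of the CodeStub changes is easier to see in one compilable sketch. SampleStub is hypothetical; CodeStub and PlatformCodeStub here merely mirror the declarations above and immediately below:

class Isolate;  // used only as an opaque pointer in this sketch

// Mirrors the CodeStub hunks above: the isolate is stored once, at
// construction, and exposed through isolate().
class CodeStub {
 public:
  explicit CodeStub(Isolate* isolate) : isolate_(isolate) {}
  virtual ~CodeStub() {}
  Isolate* isolate() const { return isolate_; }
 private:
  Isolate* isolate_;
};

class PlatformCodeStub : public CodeStub {
 public:
  explicit PlatformCodeStub(Isolate* isolate) : CodeStub(isolate) {}
};

// Before the patch a stub like this had a default constructor and fetched
// the isolate from the MacroAssembler; now the isolate rides along from the
// outermost constructor call down to the CodeStub base.
class SampleStub : public PlatformCodeStub {
 public:
  explicit SampleStub(Isolate* isolate) : PlatformCodeStub(isolate) {}
};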
class PlatformCodeStub : public CodeStub {
public:
+ explicit PlatformCodeStub(Isolate* isolate) : CodeStub(isolate) { }
+
// Retrieve the code for the stub. Generate the code if needed.
virtual Handle<Code> GenerateCode(Isolate* isolate);
INITIALIZED
};
- explicit HydrogenCodeStub(InitializationState state = INITIALIZED) {
+ HydrogenCodeStub(Isolate* isolate, InitializationState state = INITIALIZED)
+ : CodeStub(isolate) {
is_uninitialized_ = (state == UNINITIALIZED);
}
class ToNumberStub: public HydrogenCodeStub {
public:
- ToNumberStub() { }
+ explicit ToNumberStub(Isolate* isolate) : HydrogenCodeStub(isolate) { }
virtual Handle<Code> GenerateCode(Isolate* isolate);
CodeStubInterfaceDescriptor* descriptor);
static void InstallDescriptors(Isolate* isolate) {
- ToNumberStub stub;
+ ToNumberStub stub(isolate);
stub.InitializeInterfaceDescriptor(
isolate,
isolate->code_stub_interface_descriptor(CodeStub::ToNumber));
class NumberToStringStub V8_FINAL : public HydrogenCodeStub {
public:
- NumberToStringStub() {}
+ explicit NumberToStringStub(Isolate* isolate) : HydrogenCodeStub(isolate) {}
virtual Handle<Code> GenerateCode(Isolate* isolate) V8_OVERRIDE;
class FastNewClosureStub : public HydrogenCodeStub {
public:
- explicit FastNewClosureStub(StrictMode strict_mode, bool is_generator)
- : strict_mode_(strict_mode),
- is_generator_(is_generator) { }
+ FastNewClosureStub(Isolate* isolate,
+ StrictMode strict_mode,
+ bool is_generator)
+ : HydrogenCodeStub(isolate),
+ strict_mode_(strict_mode),
+ is_generator_(is_generator) { }
virtual Handle<Code> GenerateCode(Isolate* isolate);
public:
static const int kMaximumSlots = 64;
- explicit FastNewContextStub(int slots) : slots_(slots) {
+ FastNewContextStub(Isolate* isolate, int slots)
+ : HydrogenCodeStub(isolate), slots_(slots) {
ASSERT(slots_ > 0 && slots_ <= kMaximumSlots);
}
static const int kFastCloneModeCount = LAST_CLONE_MODE + 1;
- FastCloneShallowArrayStub(Mode mode,
+ FastCloneShallowArrayStub(Isolate* isolate,
+ Mode mode,
AllocationSiteMode allocation_site_mode,
int length)
- : mode_(mode),
+ : HydrogenCodeStub(isolate),
+ mode_(mode),
allocation_site_mode_(allocation_site_mode),
length_((mode == COPY_ON_WRITE_ELEMENTS) ? 0 : length) {
ASSERT_GE(length_, 0);
// Maximum number of properties in copied object.
static const int kMaximumClonedProperties = 6;
- explicit FastCloneShallowObjectStub(int length) : length_(length) {
+ FastCloneShallowObjectStub(Isolate* isolate, int length)
+ : HydrogenCodeStub(isolate), length_(length) {
ASSERT_GE(length_, 0);
ASSERT_LE(length_, kMaximumClonedProperties);
}
class CreateAllocationSiteStub : public HydrogenCodeStub {
public:
- explicit CreateAllocationSiteStub() { }
+ explicit CreateAllocationSiteStub(Isolate* isolate)
+ : HydrogenCodeStub(isolate) { }
virtual Handle<Code> GenerateCode(Isolate* isolate);
kReturnTrueFalseObject = 1 << 2
};
- explicit InstanceofStub(Flags flags) : flags_(flags) { }
+ InstanceofStub(Isolate* isolate, Flags flags)
+ : PlatformCodeStub(isolate), flags_(flags) { }
static Register left();
static Register right();
public:
enum ExponentType { INTEGER, DOUBLE, TAGGED, ON_STACK };
- explicit MathPowStub(ExponentType exponent_type)
- : exponent_type_(exponent_type) { }
+ MathPowStub(Isolate* isolate, ExponentType exponent_type)
+ : PlatformCodeStub(isolate), exponent_type_(exponent_type) { }
virtual void Generate(MacroAssembler* masm);
private:
class ICStub: public PlatformCodeStub {
public:
- explicit ICStub(Code::Kind kind) : kind_(kind) { }
+ ICStub(Isolate* isolate, Code::Kind kind)
+ : PlatformCodeStub(isolate), kind_(kind) { }
virtual Code::Kind GetCodeKind() const { return kind_; }
virtual InlineCacheState GetICState() { return MONOMORPHIC; }
class FunctionPrototypeStub: public ICStub {
public:
- explicit FunctionPrototypeStub(Code::Kind kind) : ICStub(kind) { }
+ FunctionPrototypeStub(Isolate* isolate, Code::Kind kind)
+ : ICStub(isolate, kind) { }
virtual void Generate(MacroAssembler* masm);
private:
class StoreICStub: public ICStub {
public:
- StoreICStub(Code::Kind kind, StrictMode strict_mode)
- : ICStub(kind), strict_mode_(strict_mode) { }
+ StoreICStub(Isolate* isolate, Code::Kind kind, StrictMode strict_mode)
+ : ICStub(isolate, kind), strict_mode_(strict_mode) { }
protected:
virtual ExtraICState GetExtraICState() {
class HICStub: public HydrogenCodeStub {
public:
+ explicit HICStub(Isolate* isolate) : HydrogenCodeStub(isolate) { }
virtual Code::Kind GetCodeKind() const { return kind(); }
virtual InlineCacheState GetICState() { return MONOMORPHIC; }
virtual ExtraICState GetExtraICState() { return kind(); }
protected:
- HandlerStub() : HICStub() { }
+ explicit HandlerStub(Isolate* isolate) : HICStub(isolate) { }
virtual int NotMissMinorKey() { return bit_field_; }
int bit_field_;
};
class LoadFieldStub: public HandlerStub {
public:
- LoadFieldStub(bool inobject, int index, Representation representation) {
+ LoadFieldStub(Isolate* isolate,
+ bool inobject,
+ int index,
+ Representation representation)
+ : HandlerStub(isolate) {
Initialize(Code::LOAD_IC, inobject, index, representation);
}
virtual Code::StubType GetStubType() { return Code::FAST; }
protected:
- LoadFieldStub() : HandlerStub() { }
+ explicit LoadFieldStub(Isolate* isolate) : HandlerStub(isolate) { }
void Initialize(Code::Kind kind,
bool inobject,
class StringLengthStub: public HandlerStub {
public:
- explicit StringLengthStub() : HandlerStub() {
+ explicit StringLengthStub(Isolate* isolate) : HandlerStub(isolate) {
Initialize(Code::LOAD_IC);
}
virtual Handle<Code> GenerateCode(Isolate* isolate);
class KeyedStringLengthStub: public StringLengthStub {
public:
- explicit KeyedStringLengthStub() : StringLengthStub() {
+ explicit KeyedStringLengthStub(Isolate* isolate) : StringLengthStub(isolate) {
Initialize(Code::KEYED_LOAD_IC);
}
virtual void InitializeInterfaceDescriptor(
class StoreGlobalStub : public HandlerStub {
public:
- explicit StoreGlobalStub(bool is_constant, bool check_global) {
+ StoreGlobalStub(Isolate* isolate, bool is_constant, bool check_global)
+ : HandlerStub(isolate) {
bit_field_ = IsConstantBits::encode(is_constant) |
CheckGlobalBits::encode(check_global);
}
class CallApiFunctionStub : public PlatformCodeStub {
public:
- CallApiFunctionStub(bool is_store,
+ CallApiFunctionStub(Isolate* isolate,
+ bool is_store,
bool call_data_undefined,
- int argc) {
+ int argc) : PlatformCodeStub(isolate) {
bit_field_ =
IsStoreBits::encode(is_store) |
CallDataUndefinedBits::encode(call_data_undefined) |
class CallApiGetterStub : public PlatformCodeStub {
public:
- CallApiGetterStub() {}
+ explicit CallApiGetterStub(Isolate* isolate) : PlatformCodeStub(isolate) {}
private:
virtual void Generate(MacroAssembler* masm) V8_OVERRIDE;
class KeyedLoadFieldStub: public LoadFieldStub {
public:
- KeyedLoadFieldStub(bool inobject, int index, Representation representation)
- : LoadFieldStub() {
+ KeyedLoadFieldStub(Isolate* isolate,
+ bool inobject,
+ int index,
+ Representation representation)
+ : LoadFieldStub(isolate) {
Initialize(Code::KEYED_LOAD_IC, inobject, index, representation);
}
class BinaryOpICStub : public HydrogenCodeStub {
public:
- BinaryOpICStub(Token::Value op, OverwriteMode mode)
- : HydrogenCodeStub(UNINITIALIZED), state_(op, mode) {}
+ BinaryOpICStub(Isolate* isolate, Token::Value op, OverwriteMode mode)
+ : HydrogenCodeStub(isolate, UNINITIALIZED), state_(op, mode) {}
- explicit BinaryOpICStub(const BinaryOpIC::State& state) : state_(state) {}
+ BinaryOpICStub(Isolate* isolate, const BinaryOpIC::State& state)
+ : HydrogenCodeStub(isolate), state_(state) {}
static void GenerateAheadOfTime(Isolate* isolate);
// call support for stubs in Hydrogen.
class BinaryOpICWithAllocationSiteStub V8_FINAL : public PlatformCodeStub {
public:
- explicit BinaryOpICWithAllocationSiteStub(const BinaryOpIC::State& state)
- : state_(state) {}
+ BinaryOpICWithAllocationSiteStub(Isolate* isolate,
+ const BinaryOpIC::State& state)
+ : PlatformCodeStub(isolate), state_(state) {}
static void GenerateAheadOfTime(Isolate* isolate);
class BinaryOpWithAllocationSiteStub V8_FINAL : public BinaryOpICStub {
public:
- BinaryOpWithAllocationSiteStub(Token::Value op, OverwriteMode mode)
- : BinaryOpICStub(op, mode) {}
+ BinaryOpWithAllocationSiteStub(Isolate* isolate,
+ Token::Value op,
+ OverwriteMode mode)
+ : BinaryOpICStub(isolate, op, mode) {}
- explicit BinaryOpWithAllocationSiteStub(const BinaryOpIC::State& state)
- : BinaryOpICStub(state) {}
+ BinaryOpWithAllocationSiteStub(Isolate* isolate,
+ const BinaryOpIC::State& state)
+ : BinaryOpICStub(isolate, state) {}
virtual void InitializeInterfaceDescriptor(
Isolate* isolate, CodeStubInterfaceDescriptor* descriptor) V8_OVERRIDE;
class StringAddStub V8_FINAL : public HydrogenCodeStub {
public:
- StringAddStub(StringAddFlags flags, PretenureFlag pretenure_flag)
- : bit_field_(StringAddFlagsBits::encode(flags) |
+ StringAddStub(Isolate* isolate,
+ StringAddFlags flags,
+ PretenureFlag pretenure_flag)
+ : HydrogenCodeStub(isolate),
+ bit_field_(StringAddFlagsBits::encode(flags) |
PretenureFlagBits::encode(pretenure_flag)) {}
StringAddFlags flags() const {
class ICCompareStub: public PlatformCodeStub {
public:
- ICCompareStub(Token::Value op,
+ ICCompareStub(Isolate* isolate,
+ Token::Value op,
CompareIC::State left,
CompareIC::State right,
CompareIC::State handler)
- : op_(op),
+ : PlatformCodeStub(isolate),
+ op_(op),
left_(left),
right_(right),
state_(handler) {
Type* GetType(Zone* zone, Handle<Map> map = Handle<Map>());
Type* GetInputType(Zone* zone, Handle<Map> map);
- explicit CompareNilICStub(NilValue nil) : nil_value_(nil) { }
+ CompareNilICStub(Isolate* isolate, NilValue nil)
+ : HydrogenCodeStub(isolate), nil_value_(nil) { }
- CompareNilICStub(ExtraICState ic_state,
+ CompareNilICStub(Isolate* isolate,
+ ExtraICState ic_state,
InitializationState init_state = INITIALIZED)
- : HydrogenCodeStub(init_state),
+ : HydrogenCodeStub(isolate, init_state),
nil_value_(NilValueField::decode(ic_state)),
state_(State(TypesField::decode(ic_state))) {
}
static Handle<Code> GetUninitialized(Isolate* isolate,
NilValue nil) {
- return CompareNilICStub(nil, UNINITIALIZED).GetCode(isolate);
+ return CompareNilICStub(isolate, nil, UNINITIALIZED).GetCode(isolate);
}
virtual void InitializeInterfaceDescriptor(
CodeStubInterfaceDescriptor* descriptor);
static void InstallDescriptors(Isolate* isolate) {
- CompareNilICStub compare_stub(kNullValue, UNINITIALIZED);
+ CompareNilICStub compare_stub(isolate, kNullValue, UNINITIALIZED);
compare_stub.InitializeInterfaceDescriptor(
isolate,
isolate->code_stub_interface_descriptor(CodeStub::CompareNilIC));
void Print(StringStream* stream) const;
};
- CompareNilICStub(NilValue nil, InitializationState init_state)
- : HydrogenCodeStub(init_state), nil_value_(nil) { }
+ CompareNilICStub(Isolate* isolate,
+ NilValue nil,
+ InitializationState init_state)
+ : HydrogenCodeStub(isolate, init_state), nil_value_(nil) { }
class NilValueField : public BitField<NilValue, 0, 1> {};
class TypesField : public BitField<byte, 1, NUMBER_OF_TYPES> {};
class CEntryStub : public PlatformCodeStub {
public:
- explicit CEntryStub(int result_size,
- SaveFPRegsMode save_doubles = kDontSaveFPRegs)
- : result_size_(result_size), save_doubles_(save_doubles) { }
+ CEntryStub(Isolate* isolate,
+ int result_size,
+ SaveFPRegsMode save_doubles = kDontSaveFPRegs)
+ : PlatformCodeStub(isolate),
+ result_size_(result_size),
+ save_doubles_(save_doubles) { }
void Generate(MacroAssembler* masm);
class JSEntryStub : public PlatformCodeStub {
public:
- JSEntryStub() { }
+ explicit JSEntryStub(Isolate* isolate) : PlatformCodeStub(isolate) { }
void Generate(MacroAssembler* masm) { GenerateBody(masm, false); }
class JSConstructEntryStub : public JSEntryStub {
public:
- JSConstructEntryStub() { }
+ explicit JSConstructEntryStub(Isolate* isolate) : JSEntryStub(isolate) { }
void Generate(MacroAssembler* masm) { GenerateBody(masm, true); }
NEW_STRICT
};
- explicit ArgumentsAccessStub(Type type) : type_(type) { }
+ ArgumentsAccessStub(Isolate* isolate, Type type)
+ : PlatformCodeStub(isolate), type_(type) { }
private:
Type type_;
class RegExpExecStub: public PlatformCodeStub {
public:
- RegExpExecStub() { }
+ explicit RegExpExecStub(Isolate* isolate) : PlatformCodeStub(isolate) { }
private:
Major MajorKey() { return RegExpExec; }
class RegExpConstructResultStub V8_FINAL : public HydrogenCodeStub {
public:
- RegExpConstructResultStub() { }
+ explicit RegExpConstructResultStub(Isolate* isolate)
+ : HydrogenCodeStub(isolate) { }
virtual Handle<Code> GenerateCode(Isolate* isolate) V8_OVERRIDE;
class CallFunctionStub: public PlatformCodeStub {
public:
- CallFunctionStub(int argc, CallFunctionFlags flags)
- : argc_(argc), flags_(flags) { }
+ CallFunctionStub(Isolate* isolate, int argc, CallFunctionFlags flags)
+ : PlatformCodeStub(isolate), argc_(argc), flags_(flags) { }
void Generate(MacroAssembler* masm);
class CallConstructStub: public PlatformCodeStub {
public:
- explicit CallConstructStub(CallFunctionFlags flags) : flags_(flags) {}
+ CallConstructStub(Isolate* isolate, CallFunctionFlags flags)
+ : PlatformCodeStub(isolate), flags_(flags) {}
void Generate(MacroAssembler* masm);
class KeyedLoadDictionaryElementStub : public HydrogenCodeStub {
public:
- KeyedLoadDictionaryElementStub() {}
+ explicit KeyedLoadDictionaryElementStub(Isolate* isolate)
+ : HydrogenCodeStub(isolate) {}
virtual Handle<Code> GenerateCode(Isolate* isolate) V8_OVERRIDE;
class KeyedLoadDictionaryElementPlatformStub : public PlatformCodeStub {
public:
- KeyedLoadDictionaryElementPlatformStub() {}
+ explicit KeyedLoadDictionaryElementPlatformStub(Isolate* isolate)
+ : PlatformCodeStub(isolate) {}
void Generate(MacroAssembler* masm);
class DoubleToIStub : public PlatformCodeStub {
public:
- DoubleToIStub(Register source,
+ DoubleToIStub(Isolate* isolate,
+ Register source,
Register destination,
int offset,
bool is_truncating,
- bool skip_fastpath = false) : bit_field_(0) {
+ bool skip_fastpath = false)
+ : PlatformCodeStub(isolate), bit_field_(0) {
bit_field_ = SourceRegisterBits::encode(source.code()) |
DestinationRegisterBits::encode(destination.code()) |
OffsetBits::encode(offset) |
class KeyedLoadFastElementStub : public HydrogenCodeStub {
public:
- KeyedLoadFastElementStub(bool is_js_array, ElementsKind elements_kind) {
+ KeyedLoadFastElementStub(Isolate* isolate,
+ bool is_js_array,
+ ElementsKind elements_kind)
+ : HydrogenCodeStub(isolate) {
bit_field_ = ElementsKindBits::encode(elements_kind) |
IsJSArrayBits::encode(is_js_array);
}
class KeyedStoreFastElementStub : public HydrogenCodeStub {
public:
- KeyedStoreFastElementStub(bool is_js_array,
+ KeyedStoreFastElementStub(Isolate* isolate,
+ bool is_js_array,
ElementsKind elements_kind,
- KeyedAccessStoreMode mode) {
+ KeyedAccessStoreMode mode)
+ : HydrogenCodeStub(isolate) {
bit_field_ = ElementsKindBits::encode(elements_kind) |
IsJSArrayBits::encode(is_js_array) |
StoreModeBits::encode(mode);
class TransitionElementsKindStub : public HydrogenCodeStub {
public:
- TransitionElementsKindStub(ElementsKind from_kind,
+ TransitionElementsKindStub(Isolate* isolate,
+ ElementsKind from_kind,
ElementsKind to_kind,
- bool is_js_array) {
+ bool is_js_array) : HydrogenCodeStub(isolate) {
bit_field_ = FromKindBits::encode(from_kind) |
ToKindBits::encode(to_kind) |
IsJSArrayBits::encode(is_js_array);
class ArrayConstructorStubBase : public HydrogenCodeStub {
public:
- ArrayConstructorStubBase(ElementsKind kind,
- AllocationSiteOverrideMode override_mode) {
+ ArrayConstructorStubBase(Isolate* isolate,
+ ElementsKind kind,
+ AllocationSiteOverrideMode override_mode)
+ : HydrogenCodeStub(isolate) {
// It only makes sense to override local allocation site behavior
// if there is a difference between the global allocation site policy
// for an ElementsKind and the desired usage of the stub.
class ArrayNoArgumentConstructorStub : public ArrayConstructorStubBase {
public:
ArrayNoArgumentConstructorStub(
+ Isolate* isolate,
ElementsKind kind,
AllocationSiteOverrideMode override_mode = DONT_OVERRIDE)
- : ArrayConstructorStubBase(kind, override_mode) {
+ : ArrayConstructorStubBase(isolate, kind, override_mode) {
}
virtual Handle<Code> GenerateCode(Isolate* isolate);
class ArraySingleArgumentConstructorStub : public ArrayConstructorStubBase {
public:
ArraySingleArgumentConstructorStub(
+ Isolate* isolate,
ElementsKind kind,
AllocationSiteOverrideMode override_mode = DONT_OVERRIDE)
- : ArrayConstructorStubBase(kind, override_mode) {
+ : ArrayConstructorStubBase(isolate, kind, override_mode) {
}
virtual Handle<Code> GenerateCode(Isolate* isolate);
class ArrayNArgumentsConstructorStub : public ArrayConstructorStubBase {
public:
ArrayNArgumentsConstructorStub(
+ Isolate* isolate,
ElementsKind kind,
AllocationSiteOverrideMode override_mode = DONT_OVERRIDE)
- : ArrayConstructorStubBase(kind, override_mode) {
+ : ArrayConstructorStubBase(isolate, kind, override_mode) {
}
virtual Handle<Code> GenerateCode(Isolate* isolate);
class InternalArrayConstructorStubBase : public HydrogenCodeStub {
public:
- explicit InternalArrayConstructorStubBase(ElementsKind kind) {
+ InternalArrayConstructorStubBase(Isolate* isolate, ElementsKind kind)
+ : HydrogenCodeStub(isolate) {
kind_ = kind;
}
class InternalArrayNoArgumentConstructorStub : public
InternalArrayConstructorStubBase {
public:
- explicit InternalArrayNoArgumentConstructorStub(ElementsKind kind)
- : InternalArrayConstructorStubBase(kind) { }
+ InternalArrayNoArgumentConstructorStub(Isolate* isolate,
+ ElementsKind kind)
+ : InternalArrayConstructorStubBase(isolate, kind) { }
virtual Handle<Code> GenerateCode(Isolate* isolate);
class InternalArraySingleArgumentConstructorStub : public
InternalArrayConstructorStubBase {
public:
- explicit InternalArraySingleArgumentConstructorStub(ElementsKind kind)
- : InternalArrayConstructorStubBase(kind) { }
+ InternalArraySingleArgumentConstructorStub(Isolate* isolate,
+ ElementsKind kind)
+ : InternalArrayConstructorStubBase(isolate, kind) { }
virtual Handle<Code> GenerateCode(Isolate* isolate);
class InternalArrayNArgumentsConstructorStub : public
InternalArrayConstructorStubBase {
public:
- explicit InternalArrayNArgumentsConstructorStub(ElementsKind kind)
- : InternalArrayConstructorStubBase(kind) { }
+ InternalArrayNArgumentsConstructorStub(Isolate* isolate, ElementsKind kind)
+ : InternalArrayConstructorStubBase(isolate, kind) { }
virtual Handle<Code> GenerateCode(Isolate* isolate);
class KeyedStoreElementStub : public PlatformCodeStub {
public:
- KeyedStoreElementStub(bool is_js_array,
+ KeyedStoreElementStub(Isolate* isolate,
+ bool is_js_array,
ElementsKind elements_kind,
KeyedAccessStoreMode store_mode)
- : is_js_array_(is_js_array),
+ : PlatformCodeStub(isolate),
+ is_js_array_(is_js_array),
elements_kind_(elements_kind),
store_mode_(store_mode),
fp_registers_(CanUseFPRegisters()) { }
static Types Generic() { return Types((1 << NUMBER_OF_TYPES) - 1); }
};
- explicit ToBooleanStub(Types types = Types())
- : types_(types) { }
- explicit ToBooleanStub(ExtraICState state)
- : types_(static_cast<byte>(state)) { }
+ ToBooleanStub(Isolate* isolate, Types types = Types())
+ : HydrogenCodeStub(isolate), types_(types) { }
+ ToBooleanStub(Isolate* isolate, ExtraICState state)
+ : HydrogenCodeStub(isolate), types_(static_cast<byte>(state)) { }
bool UpdateStatus(Handle<Object> object);
Types GetTypes() { return types_; }
virtual bool SometimesSetsUpAFrame() { return false; }
static void InstallDescriptors(Isolate* isolate) {
- ToBooleanStub stub;
+ ToBooleanStub stub(isolate);
stub.InitializeInterfaceDescriptor(
isolate,
isolate->code_stub_interface_descriptor(CodeStub::ToBoolean));
}
static Handle<Code> GetUninitialized(Isolate* isolate) {
- return ToBooleanStub(UNINITIALIZED).GetCode(isolate);
+ return ToBooleanStub(isolate, UNINITIALIZED).GetCode(isolate);
}
virtual ExtraICState GetExtraICState() {
Major MajorKey() { return ToBoolean; }
int NotMissMinorKey() { return GetExtraICState(); }
- explicit ToBooleanStub(InitializationState init_state) :
- HydrogenCodeStub(init_state) {}
+ ToBooleanStub(Isolate* isolate, InitializationState init_state) :
+ HydrogenCodeStub(isolate, init_state) {}
Types types_;
};
class ElementsTransitionAndStoreStub : public HydrogenCodeStub {
public:
- ElementsTransitionAndStoreStub(ElementsKind from_kind,
+ ElementsTransitionAndStoreStub(Isolate* isolate,
+ ElementsKind from_kind,
ElementsKind to_kind,
bool is_jsarray,
KeyedAccessStoreMode store_mode)
- : from_kind_(from_kind),
+ : HydrogenCodeStub(isolate),
+ from_kind_(from_kind),
to_kind_(to_kind),
is_jsarray_(is_jsarray),
store_mode_(store_mode) {}
class StoreArrayLiteralElementStub : public PlatformCodeStub {
public:
- StoreArrayLiteralElementStub()
- : fp_registers_(CanUseFPRegisters()) { }
+ explicit StoreArrayLiteralElementStub(Isolate* isolate)
+ : PlatformCodeStub(isolate), fp_registers_(CanUseFPRegisters()) { }
private:
class FPRegisters: public BitField<bool, 0, 1> {};
class StubFailureTrampolineStub : public PlatformCodeStub {
public:
- explicit StubFailureTrampolineStub(StubFunctionMode function_mode)
- : fp_registers_(CanUseFPRegisters()), function_mode_(function_mode) {}
+ StubFailureTrampolineStub(Isolate* isolate, StubFunctionMode function_mode)
+ : PlatformCodeStub(isolate),
+ fp_registers_(CanUseFPRegisters()),
+ function_mode_(function_mode) {}
static void GenerateAheadOfTime(Isolate* isolate);
class ProfileEntryHookStub : public PlatformCodeStub {
public:
- explicit ProfileEntryHookStub() {}
+ explicit ProfileEntryHookStub(Isolate* isolate) : PlatformCodeStub(isolate) {}
// The profile entry hook function is not allowed to cause a GC.
virtual bool SometimesSetsUpAFrame() { return false; }
// Compute this frame's PC, state, and continuation.
Code* trampoline = NULL;
StubFunctionMode function_mode = descriptor->function_mode_;
- StubFailureTrampolineStub(function_mode).FindCodeInCache(&trampoline,
+ StubFailureTrampolineStub(isolate_,
+ function_mode).FindCodeInCache(&trampoline,
isolate_);
ASSERT(trampoline != NULL);
output_frame->SetPc(reinterpret_cast<intptr_t>(
Code* StubFailureTrampolineFrame::unchecked_code() const {
Code* trampoline;
- StubFailureTrampolineStub(NOT_JS_FUNCTION_STUB_MODE).
+ StubFailureTrampolineStub(isolate(), NOT_JS_FUNCTION_STUB_MODE).
FindCodeInCache(&trampoline, isolate());
if (trampoline->contains(pc())) {
return trampoline;
}
- StubFailureTrampolineStub(JS_FUNCTION_STUB_MODE).
+ StubFailureTrampolineStub(isolate(), JS_FUNCTION_STUB_MODE).
FindCodeInCache(&trampoline, isolate());
if (trampoline->contains(pc())) {
return trampoline;
void Heap::CreateJSEntryStub() {
- JSEntryStub stub;
+ JSEntryStub stub(isolate());
set_js_entry_code(*stub.GetCode(isolate()));
}
void Heap::CreateJSConstructEntryStub() {
- JSConstructEntryStub stub;
+ JSConstructEntryStub stub(isolate());
set_js_construct_entry_code(*stub.GetCode(isolate()));
}
CallInterfaceDescriptor* descriptor =
isolate()->call_descriptor(Isolate::ApiFunctionCall);
- CallApiFunctionStub stub(is_store, call_data_is_undefined, argc);
+ CallApiFunctionStub stub(isolate(), is_store, call_data_is_undefined, argc);
Handle<Code> code = stub.GetCode(isolate());
HConstant* code_value = Add<HConstant>(code);
if (is_construct) {
// No type feedback cell is available
__ mov(ebx, masm->isolate()->factory()->undefined_value());
- CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
+ CallConstructStub stub(masm->isolate(), NO_CALL_FUNCTION_FLAGS);
__ CallStub(&stub);
} else {
ParameterCount actual(eax);
void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) {
// Update the static counter each time a new code stub is generated.
- Isolate* isolate = masm->isolate();
- isolate->counters()->code_stubs()->Increment();
+ isolate()->counters()->code_stubs()->Increment();
- CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate);
+ CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate());
int param_count = descriptor->register_param_count_;
{
// Call the runtime system in a fresh internal frame.
AllowExternalCallThatCantCauseGC scope(masm);
__ PrepareCallCFunction(argument_count, ecx);
__ mov(Operand(esp, 0 * kPointerSize),
- Immediate(ExternalReference::isolate_address(masm->isolate())));
+ Immediate(ExternalReference::isolate_address(isolate())));
__ CallCFunction(
- ExternalReference::store_buffer_overflow_function(masm->isolate()),
+ ExternalReference::store_buffer_overflow_function(isolate()),
argument_count);
if (save_doubles_ == kSaveFPRegs) {
CpuFeatureScope scope(masm, SSE2);
void MathPowStub::Generate(MacroAssembler* masm) {
CpuFeatureScope use_sse2(masm, SSE2);
- Factory* factory = masm->isolate()->factory();
+ Factory* factory = isolate()->factory();
const Register exponent = eax;
const Register base = edx;
const Register scratch = ecx;
__ Cvtsi2sd(double_exponent, exponent);
// Returning or bailing out.
- Counters* counters = masm->isolate()->counters();
+ Counters* counters = isolate()->counters();
if (exponent_type_ == ON_STACK) {
// The arguments are still on the stack.
__ bind(&call_runtime);
__ movsd(Operand(esp, 0 * kDoubleSize), double_base);
__ movsd(Operand(esp, 1 * kDoubleSize), double_exponent);
__ CallCFunction(
- ExternalReference::power_double_double_function(masm->isolate()), 4);
+ ExternalReference::power_double_double_function(isolate()), 4);
}
// Return value is in st(0) on ia32.
// Store it into the (fixed) result register.
Label miss;
if (kind() == Code::KEYED_LOAD_IC) {
- __ cmp(ecx, Immediate(masm->isolate()->factory()->prototype_string()));
+ __ cmp(ecx, Immediate(isolate()->factory()->prototype_string()));
__ j(not_equal, &miss);
}
void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
- Isolate* isolate = masm->isolate();
-
// esp[0] : return address
// esp[4] : number of parameters (tagged)
// esp[8] : receiver displacement
__ j(zero, &skip_parameter_map);
__ mov(FieldOperand(edi, FixedArray::kMapOffset),
- Immediate(isolate->factory()->sloppy_arguments_elements_map()));
+ Immediate(isolate()->factory()->sloppy_arguments_elements_map()));
__ lea(eax, Operand(ebx, reinterpret_cast<intptr_t>(Smi::FromInt(2))));
__ mov(FieldOperand(edi, FixedArray::kLengthOffset), eax);
__ mov(FieldOperand(edi, FixedArray::kHeaderSize + 0 * kPointerSize), esi);
__ mov(ebx, Immediate(Smi::FromInt(Context::MIN_CONTEXT_SLOTS)));
__ add(ebx, Operand(esp, 4 * kPointerSize));
__ sub(ebx, eax);
- __ mov(ecx, isolate->factory()->the_hole_value());
+ __ mov(ecx, isolate()->factory()->the_hole_value());
__ mov(edx, edi);
__ lea(edi, Operand(edi, eax, times_2, kParameterMapHeaderSize));
// eax = loop variable (tagged)
// esp[16] = address of receiver argument
// Copy arguments header and remaining slots (if there are any).
__ mov(FieldOperand(edi, FixedArray::kMapOffset),
- Immediate(isolate->factory()->fixed_array_map()));
+ Immediate(isolate()->factory()->fixed_array_map()));
__ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);
Label arguments_loop, arguments_test;
void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
- Isolate* isolate = masm->isolate();
-
// esp[0] : return address
// esp[4] : number of parameters
// esp[8] : receiver displacement
__ lea(edi, Operand(eax, Heap::kStrictArgumentsObjectSize));
__ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);
__ mov(FieldOperand(edi, FixedArray::kMapOffset),
- Immediate(isolate->factory()->fixed_array_map()));
+ Immediate(isolate()->factory()->fixed_array_map()));
__ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);
// Untag the length for the loop below.
static const int kJSRegExpOffset = 4 * kPointerSize;
Label runtime;
- Factory* factory = masm->isolate()->factory();
+ Factory* factory = isolate()->factory();
// Ensure that a RegExp stack is allocated.
ExternalReference address_of_regexp_stack_memory_address =
- ExternalReference::address_of_regexp_stack_memory_address(
- masm->isolate());
+ ExternalReference::address_of_regexp_stack_memory_address(isolate());
ExternalReference address_of_regexp_stack_memory_size =
- ExternalReference::address_of_regexp_stack_memory_size(masm->isolate());
+ ExternalReference::address_of_regexp_stack_memory_size(isolate());
__ mov(ebx, Operand::StaticVariable(address_of_regexp_stack_memory_size));
__ test(ebx, ebx);
__ j(zero, &runtime);
// edx: code
// ecx: encoding of subject string (1 if ASCII, 0 if two_byte);
// All checks done. Now push arguments for native regexp code.
- Counters* counters = masm->isolate()->counters();
+ Counters* counters = isolate()->counters();
__ IncrementCounter(counters->regexp_entry_native(), 1);
// Isolates: note we add an additional parameter here (isolate pointer).
// Argument 9: Pass current isolate address.
__ mov(Operand(esp, 8 * kPointerSize),
- Immediate(ExternalReference::isolate_address(masm->isolate())));
+ Immediate(ExternalReference::isolate_address(isolate())));
// Argument 8: Indicate that this is a direct call from JavaScript.
__ mov(Operand(esp, 7 * kPointerSize), Immediate(1));
// Argument 5: static offsets vector buffer.
__ mov(Operand(esp, 4 * kPointerSize),
Immediate(ExternalReference::address_of_static_offsets_vector(
- masm->isolate())));
+ isolate())));
// Argument 2: Previous index.
__ SmiUntag(ebx);
// haven't created the exception yet. Handle that in the runtime system.
// TODO(592): Rerunning the RegExp to get the stack overflow exception.
ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
- masm->isolate());
- __ mov(edx, Immediate(masm->isolate()->factory()->the_hole_value()));
+ isolate());
+ __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
__ mov(eax, Operand::StaticVariable(pending_exception));
__ cmp(edx, eax);
__ j(equal, &runtime);
// Get the static offsets vector filled by the native regexp code.
ExternalReference address_of_static_offsets_vector =
- ExternalReference::address_of_static_offsets_vector(masm->isolate());
+ ExternalReference::address_of_static_offsets_vector(isolate());
__ mov(ecx, Immediate(address_of_static_offsets_vector));
// ebx: last_match_info backing store (FixedArray)
// Check for undefined. undefined OP undefined is false even though
// undefined == undefined.
Label check_for_nan;
- __ cmp(edx, masm->isolate()->factory()->undefined_value());
+ __ cmp(edx, isolate()->factory()->undefined_value());
__ j(not_equal, &check_for_nan, Label::kNear);
__ Move(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc))));
__ ret(0);
// Test for NaN. Compare heap numbers in a general way,
// to handle NaNs correctly.
__ cmp(FieldOperand(edx, HeapObject::kMapOffset),
- Immediate(masm->isolate()->factory()->heap_number_map()));
+ Immediate(isolate()->factory()->heap_number_map()));
__ j(equal, &generic_heap_number_comparison, Label::kNear);
if (cc != equal) {
// Call runtime on identical JSObjects. Otherwise return equal.
// Check if the non-smi operand is a heap number.
__ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
- Immediate(masm->isolate()->factory()->heap_number_map()));
+ Immediate(isolate()->factory()->heap_number_map()));
// If heap number, handle it in the slow case.
__ j(equal, &slow, Label::kNear);
// Return non-equal (ebx is not zero)
// If we didn't have a matching function, and we didn't find the megamorphic
// sentinel, then we have in the slot either some other function or an
// AllocationSite. Do a map check on the object in ecx.
- Handle<Map> allocation_site_map =
- masm->isolate()->factory()->allocation_site_map();
+ Handle<Map> allocation_site_map = isolate->factory()->allocation_site_map();
__ cmp(FieldOperand(ecx, 0), Immediate(allocation_site_map));
__ j(not_equal, &miss);
__ push(edx);
__ push(ebx);
- CreateAllocationSiteStub create_stub;
+ CreateAllocationSiteStub create_stub(isolate);
__ CallStub(&create_stub);
__ pop(ebx);
// edx : (only if ebx is not the megamorphic symbol) slot in feedback
// vector (Smi)
// edi : the function to call
- Isolate* isolate = masm->isolate();
Label slow, non_function, wrap, cont;
if (NeedsChecks()) {
// Type information was updated. Because we may call Array, which
// expects either undefined or an AllocationSite in ebx we need
// to set ebx to undefined.
- __ mov(ebx, Immediate(isolate->factory()->undefined_value()));
+ __ mov(ebx, Immediate(isolate()->factory()->undefined_value()));
}
}
// object (megamorphic symbol) so no write barrier is needed.
__ mov(FieldOperand(ebx, edx, times_half_pointer_size,
FixedArray::kHeaderSize),
- Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate)));
+ Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate())));
}
// Check for function proxy.
__ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE);
__ Move(ebx, Immediate(0));
__ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY);
{
- Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline();
+ Handle<Code> adaptor =
+ isolate()->builtins()->ArgumentsAdaptorTrampoline();
__ jmp(adaptor, RelocInfo::CODE_TARGET);
}
__ Move(eax, Immediate(argc_));
__ Move(ebx, Immediate(0));
__ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
- Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline();
+ Handle<Code> adaptor = isolate()->builtins()->ArgumentsAdaptorTrampoline();
__ jmp(adaptor, RelocInfo::CODE_TARGET);
}
__ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size,
FixedArray::kHeaderSize));
Handle<Map> allocation_site_map =
- masm->isolate()->factory()->allocation_site_map();
+ isolate()->factory()->allocation_site_map();
__ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map));
__ j(equal, &feedback_register_initialized);
- __ mov(ebx, masm->isolate()->factory()->undefined_value());
+ __ mov(ebx, isolate()->factory()->undefined_value());
__ bind(&feedback_register_initialized);
}
// Set expected number of arguments to zero (not changing eax).
__ Move(ebx, Immediate(0));
Handle<Code> arguments_adaptor =
- masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
+ isolate()->builtins()->ArgumentsAdaptorTrampoline();
__ jmp(arguments_adaptor, RelocInfo::CODE_TARGET);
}
void CodeStub::GenerateFPStubs(Isolate* isolate) {
if (CpuFeatures::IsSupported(SSE2)) {
- CEntryStub save_doubles(1, kSaveFPRegs);
+ CEntryStub save_doubles(isolate, 1, kSaveFPRegs);
// Stubs might already be in the snapshot; detect that and don't regenerate,
// which would lead to code stub initialization state being messed up.
Code* save_doubles_code;
void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
- CEntryStub stub(1, kDontSaveFPRegs);
+ CEntryStub stub(isolate, 1, kDontSaveFPRegs);
stub.GetCode(isolate);
}
// Result returned in eax, or eax+edx if result_size_ is 2.
- Isolate* isolate = masm->isolate();
-
// Check stack alignment.
if (FLAG_debug_code) {
__ CheckStackAlignment();
__ mov(Operand(esp, 0 * kPointerSize), edi); // argc.
__ mov(Operand(esp, 1 * kPointerSize), esi); // argv.
__ mov(Operand(esp, 2 * kPointerSize),
- Immediate(ExternalReference::isolate_address(isolate)));
+ Immediate(ExternalReference::isolate_address(isolate())));
__ call(ebx);
// Result is in eax or edx:eax - do not destroy these registers!
// lead to crashes in the IC code later.
if (FLAG_debug_code) {
Label okay;
- __ cmp(eax, isolate->factory()->the_hole_value());
+ __ cmp(eax, isolate()->factory()->the_hole_value());
__ j(not_equal, &okay, Label::kNear);
__ int3();
__ bind(&okay);
// Check result for exception sentinel.
Label exception_returned;
- __ cmp(eax, isolate->factory()->exception());
+ __ cmp(eax, isolate()->factory()->exception());
__ j(equal, &exception_returned);
ExternalReference pending_exception_address(
- Isolate::kPendingExceptionAddress, isolate);
+ Isolate::kPendingExceptionAddress, isolate());
// Check that there is no pending exception, otherwise we
// should have returned the exception sentinel.
if (FLAG_debug_code) {
__ push(edx);
- __ mov(edx, Immediate(isolate->factory()->the_hole_value()));
+ __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
Label okay;
__ cmp(edx, Operand::StaticVariable(pending_exception_address));
// Cannot use check here as it attempts to generate call into runtime.
__ mov(eax, Operand::StaticVariable(pending_exception_address));
// Clear the pending exception.
- __ mov(edx, Immediate(isolate->factory()->the_hole_value()));
+ __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
__ mov(Operand::StaticVariable(pending_exception_address), edx);
// Special handling of termination exceptions which are uncatchable
// by javascript code.
Label throw_termination_exception;
- __ cmp(eax, isolate->factory()->termination_exception());
+ __ cmp(eax, isolate()->factory()->termination_exception());
__ j(equal, &throw_termination_exception);
// Handle normal exception.
__ push(ebx);
// Save copies of the top frame descriptor on the stack.
- ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, masm->isolate());
+ ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate());
__ push(Operand::StaticVariable(c_entry_fp));
// If this is the outermost JS call, set js_entry_sp value.
- ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress,
- masm->isolate());
+ ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate());
__ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0));
__ j(not_equal, &not_outermost_js, Label::kNear);
__ mov(Operand::StaticVariable(js_entry_sp), ebp);
// Caught exception: Store result (exception) in the pending exception
// field in the JSEnv and return a failure sentinel.
ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
- masm->isolate());
+ isolate());
__ mov(Operand::StaticVariable(pending_exception), eax);
- __ mov(eax, Immediate(masm->isolate()->factory()->exception()));
+ __ mov(eax, Immediate(isolate()->factory()->exception()));
__ jmp(&exit);
// Invoke: Link this frame into the handler chain. There's only one
__ PushTryHandler(StackHandler::JS_ENTRY, 0);
// Clear any pending exceptions.
- __ mov(edx, Immediate(masm->isolate()->factory()->the_hole_value()));
+ __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
__ mov(Operand::StaticVariable(pending_exception), edx);
// Fake a receiver (NULL).
// builtin stubs may not have been generated yet.
if (is_construct) {
ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
- masm->isolate());
+ isolate());
__ mov(edx, Immediate(construct_entry));
} else {
- ExternalReference entry(Builtins::kJSEntryTrampoline,
- masm->isolate());
+ ExternalReference entry(Builtins::kJSEntryTrampoline, isolate());
__ mov(edx, Immediate(entry));
}
__ mov(edx, Operand(edx, 0)); // deref address
// Restore the top frame descriptor from the stack.
__ pop(Operand::StaticVariable(ExternalReference(
- Isolate::kCEntryFPAddress,
- masm->isolate())));
+ Isolate::kCEntryFPAddress, isolate())));
// Restore callee-saved registers (C calling conventions).
__ pop(ebx);
__ bind(&loop);
__ cmp(scratch, prototype);
__ j(equal, &is_instance, Label::kNear);
- Factory* factory = masm->isolate()->factory();
+ Factory* factory = isolate()->factory();
__ cmp(scratch, Immediate(factory->null_value()));
__ j(equal, &is_not_instance, Label::kNear);
__ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
// Longer than original string's length or negative: unsafe arguments.
__ j(above, &runtime);
// Return original string.
- Counters* counters = masm->isolate()->counters();
+ Counters* counters = isolate()->counters();
__ IncrementCounter(counters->sub_string_native(), 1);
__ ret(3 * kPointerSize);
__ bind(&not_original_string);
__ test(ebx, Immediate(kIsIndirectStringMask));
__ j(zero, &seq_or_external_string, Label::kNear);
- Factory* factory = masm->isolate()->factory();
+ Factory* factory = isolate()->factory();
__ test(ebx, Immediate(kSlicedNotConsMask));
__ j(not_zero, &sliced_string, Label::kNear);
// Cons string. Check whether it is flat, then fetch first part.
STATIC_ASSERT(EQUAL == 0);
STATIC_ASSERT(kSmiTag == 0);
__ Move(eax, Immediate(Smi::FromInt(EQUAL)));
- __ IncrementCounter(masm->isolate()->counters()->string_compare_native(), 1);
+ __ IncrementCounter(isolate()->counters()->string_compare_native(), 1);
__ ret(2 * kPointerSize);
__ bind(&not_same);
// -- eax : right
// -- esp[0] : return address
// -----------------------------------
- Isolate* isolate = masm->isolate();
// Load ecx with the allocation site. We stick an undefined dummy value here
// and replace it with the real allocation site later when we instantiate this
// stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
- __ mov(ecx, handle(isolate->heap()->undefined_value()));
+ __ mov(ecx, handle(isolate()->heap()->undefined_value()));
// Make sure that we actually patched the allocation site.
if (FLAG_debug_code) {
__ test(ecx, Immediate(kSmiTagMask));
__ Assert(not_equal, kExpectedAllocationSite);
__ cmp(FieldOperand(ecx, HeapObject::kMapOffset),
- isolate->factory()->allocation_site_map());
+ isolate()->factory()->allocation_site_map());
__ Assert(equal, kExpectedAllocationSite);
}
// Tail call into the stub that handles binary operations with allocation
// sites.
- BinaryOpWithAllocationSiteStub stub(state_);
+ BinaryOpWithAllocationSiteStub stub(isolate(), state_);
__ TailCallStub(&stub);
}
Label done, left, left_smi, right_smi;
__ JumpIfSmi(eax, &right_smi, Label::kNear);
__ cmp(FieldOperand(eax, HeapObject::kMapOffset),
- masm->isolate()->factory()->heap_number_map());
+ isolate()->factory()->heap_number_map());
__ j(not_equal, &maybe_undefined1, Label::kNear);
__ movsd(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
__ jmp(&left, Label::kNear);
__ bind(&left);
__ JumpIfSmi(edx, &left_smi, Label::kNear);
__ cmp(FieldOperand(edx, HeapObject::kMapOffset),
- masm->isolate()->factory()->heap_number_map());
+ isolate()->factory()->heap_number_map());
__ j(not_equal, &maybe_undefined2, Label::kNear);
__ movsd(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
__ jmp(&done);
__ JumpIfSmi(ecx, &generic_stub, Label::kNear);
__ cmp(FieldOperand(eax, HeapObject::kMapOffset),
- masm->isolate()->factory()->heap_number_map());
+ isolate()->factory()->heap_number_map());
__ j(not_equal, &maybe_undefined1, Label::kNear);
__ cmp(FieldOperand(edx, HeapObject::kMapOffset),
- masm->isolate()->factory()->heap_number_map());
+ isolate()->factory()->heap_number_map());
__ j(not_equal, &maybe_undefined2, Label::kNear);
}
__ bind(&unordered);
__ bind(&generic_stub);
- ICCompareStub stub(op_, CompareIC::GENERIC, CompareIC::GENERIC,
+ ICCompareStub stub(isolate(), op_, CompareIC::GENERIC, CompareIC::GENERIC,
CompareIC::GENERIC);
- __ jmp(stub.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
+ __ jmp(stub.GetCode(isolate()), RelocInfo::CODE_TARGET);
__ bind(&maybe_undefined1);
if (Token::IsOrderedRelationalCompareOp(op_)) {
- __ cmp(eax, Immediate(masm->isolate()->factory()->undefined_value()));
+ __ cmp(eax, Immediate(isolate()->factory()->undefined_value()));
__ j(not_equal, &miss);
__ JumpIfSmi(edx, &unordered);
__ CmpObjectType(edx, HEAP_NUMBER_TYPE, ecx);
__ bind(&maybe_undefined2);
if (Token::IsOrderedRelationalCompareOp(op_)) {
- __ cmp(edx, Immediate(masm->isolate()->factory()->undefined_value()));
+ __ cmp(edx, Immediate(isolate()->factory()->undefined_value()));
__ j(equal, &unordered);
}
{
// Call the runtime system in a fresh internal frame.
ExternalReference miss = ExternalReference(IC_Utility(IC::kCompareIC_Miss),
- masm->isolate());
+ isolate());
FrameScope scope(masm, StackFrame::INTERNAL);
__ push(edx); // Preserve edx and eax.
__ push(eax);
__ bind(&good);
}
- NameDictionaryLookupStub stub(properties, r0, r0, NEGATIVE_LOOKUP);
+ NameDictionaryLookupStub stub(masm->isolate(), properties, r0, r0,
+ NEGATIVE_LOOKUP);
__ push(Immediate(Handle<Object>(name)));
__ push(Immediate(name->Hash()));
__ CallStub(&stub);
__ j(equal, done);
}
- NameDictionaryLookupStub stub(elements, r1, r0, POSITIVE_LOOKUP);
+ NameDictionaryLookupStub stub(masm->isolate(), elements, r1, r0,
+ POSITIVE_LOOKUP);
__ push(name);
__ mov(r0, FieldOperand(name, Name::kHashFieldOffset));
__ shr(r0, Name::kHashShift);
index_,
times_pointer_size,
kElementsStartOffset - kHeapObjectTag));
- __ cmp(scratch, masm->isolate()->factory()->undefined_value());
+ __ cmp(scratch, isolate()->factory()->undefined_value());
__ j(equal, ¬_in_dictionary);
// Stop if found the property.
void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
Isolate* isolate) {
- StoreBufferOverflowStub stub(kDontSaveFPRegs);
+ StoreBufferOverflowStub stub(isolate, kDontSaveFPRegs);
stub.GetCode(isolate);
if (CpuFeatures::IsSafeForSnapshot(SSE2)) {
- StoreBufferOverflowStub stub2(kSaveFPRegs);
+ StoreBufferOverflowStub stub2(isolate, kSaveFPRegs);
stub2.GetCode(isolate);
}
}
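The AheadOfTime generators are static, so they keep their explicit Isolate* parameter; the only change is that the same pointer now also feeds the stub constructors, keeping construction and GetCode() in agreement. A short sketch on the stand-in types above (GetCode elided, since the sketch does not model it):

// Hypothetical AOT generator in the style of
// StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime().
static void GenerateStubsAheadOfTimeSketch(Isolate* isolate) {
  ExampleStub stub(isolate);  // constructor and GetCode() now take the same
  // stub.GetCode(isolate);   // pointer (GetCode exists only in the real tree)
  (void)stub;
}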
__ mov(Operand(esp, 0 * kPointerSize), regs_.object());
__ mov(Operand(esp, 1 * kPointerSize), regs_.address()); // Slot.
__ mov(Operand(esp, 2 * kPointerSize),
- Immediate(ExternalReference::isolate_address(masm->isolate())));
+ Immediate(ExternalReference::isolate_address(isolate())));
AllowExternalCallThatCantCauseGC scope(masm);
__ CallCFunction(
- ExternalReference::incremental_marking_record_write_function(
- masm->isolate()),
+ ExternalReference::incremental_marking_record_write_function(isolate()),
argument_count);
regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_);
void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
- CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
- __ call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
+ CEntryStub ces(isolate(), 1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
+ __ call(ces.GetCode(isolate()), RelocInfo::CODE_TARGET);
int parameter_count_offset =
StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
__ mov(ebx, MemOperand(ebp, parameter_count_offset));
void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
if (masm->isolate()->function_entry_hook() != NULL) {
- ProfileEntryHookStub stub;
+ ProfileEntryHookStub stub(masm->isolate());
masm->CallStub(&stub);
}
}
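MaybeCallEntryHook is a static member, so there is no stub instance whose isolate() could be used; static helpers and free functions (CreateArrayDispatch, the NameDictionaryLookupStub call sites above) keep pulling the Isolate out of the MacroAssembler. On the stand-in types again:

// In a static context the assembler is the only source of the Isolate,
// which is why masm->isolate() survives in these hunks.
static void StaticHelperSketch(MacroAssembler* masm) {
  ExampleStub stub(masm->isolate());
  stub.Generate(masm);
}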
__ push(eax);
// Call the entry hook.
- ASSERT(masm->isolate()->function_entry_hook() != NULL);
- __ call(FUNCTION_ADDR(masm->isolate()->function_entry_hook()),
+ ASSERT(isolate()->function_entry_hook() != NULL);
+ __ call(FUNCTION_ADDR(isolate()->function_entry_hook()),
RelocInfo::RUNTIME_ENTRY);
__ add(esp, Immediate(2 * kPointerSize));
static void CreateArrayDispatch(MacroAssembler* masm,
AllocationSiteOverrideMode mode) {
if (mode == DISABLE_ALLOCATION_SITES) {
- T stub(GetInitialFastElementsKind(),
+ T stub(masm->isolate(),
+ GetInitialFastElementsKind(),
mode);
__ TailCallStub(&stub);
} else if (mode == DONT_OVERRIDE) {
ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
__ cmp(edx, kind);
__ j(not_equal, &next);
- T stub(kind);
+ T stub(masm->isolate(), kind);
__ TailCallStub(&stub);
__ bind(&next);
}
ElementsKind initial = GetInitialFastElementsKind();
ElementsKind holey_initial = GetHoleyElementsKind(initial);
- ArraySingleArgumentConstructorStub stub_holey(holey_initial,
+ ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
+ holey_initial,
DISABLE_ALLOCATION_SITES);
__ TailCallStub(&stub_holey);
__ bind(&normal_sequence);
- ArraySingleArgumentConstructorStub stub(initial,
+ ArraySingleArgumentConstructorStub stub(masm->isolate(),
+ initial,
DISABLE_ALLOCATION_SITES);
__ TailCallStub(&stub);
} else if (mode == DONT_OVERRIDE) {
ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
__ cmp(edx, kind);
__ j(not_equal, &next);
- ArraySingleArgumentConstructorStub stub(kind);
+ ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
__ TailCallStub(&stub);
__ bind(&next);
}
TERMINAL_FAST_ELEMENTS_KIND);
for (int i = 0; i <= to_index; ++i) {
ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
- T stub(kind);
+ T stub(isolate, kind);
stub.GetCode(isolate);
if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) {
- T stub1(kind, DISABLE_ALLOCATION_SITES);
+ T stub1(isolate, kind, DISABLE_ALLOCATION_SITES);
stub1.GetCode(isolate);
}
}
ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
for (int i = 0; i < 2; i++) {
// For internal arrays we only need a few things
- InternalArrayNoArgumentConstructorStub stubh1(kinds[i]);
+ InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]);
stubh1.GetCode(isolate);
- InternalArraySingleArgumentConstructorStub stubh2(kinds[i]);
+ InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
stubh2.GetCode(isolate);
- InternalArrayNArgumentsConstructorStub stubh3(kinds[i]);
+ InternalArrayNArgumentsConstructorStub stubh3(isolate, kinds[i]);
stubh3.GetCode(isolate);
}
}
Label no_info;
// If the feedback vector is the undefined value, call an array constructor
// that doesn't use AllocationSites.
- __ cmp(ebx, masm->isolate()->factory()->undefined_value());
+ __ cmp(ebx, isolate()->factory()->undefined_value());
__ j(equal, &no_info);
// Only look at the lower 16 bits of the transition info.
__ test(eax, eax);
__ j(not_zero, &not_zero_case);
- InternalArrayNoArgumentConstructorStub stub0(kind);
+ InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
__ TailCallStub(&stub0);
__ bind(&not_zero_case);
__ j(zero, &normal_sequence);
InternalArraySingleArgumentConstructorStub
- stub1_holey(GetHoleyElementsKind(kind));
+ stub1_holey(isolate(), GetHoleyElementsKind(kind));
__ TailCallStub(&stub1_holey);
}
__ bind(&normal_sequence);
- InternalArraySingleArgumentConstructorStub stub1(kind);
+ InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
__ TailCallStub(&stub1);
__ bind(&not_one_case);
- InternalArrayNArgumentsConstructorStub stubN(kind);
+ InternalArrayNArgumentsConstructorStub stubN(isolate(), kind);
__ TailCallStub(&stubN);
}
STATIC_ASSERT(FCA::kHolderIndex == 0);
STATIC_ASSERT(FCA::kArgsLength == 7);
- Isolate* isolate = masm->isolate();
-
__ pop(return_address);
// context save
Register scratch = call_data;
if (!call_data_undefined) {
// return value
- __ push(Immediate(isolate->factory()->undefined_value()));
+ __ push(Immediate(isolate()->factory()->undefined_value()));
// return value default
- __ push(Immediate(isolate->factory()->undefined_value()));
+ __ push(Immediate(isolate()->factory()->undefined_value()));
} else {
// return value
__ push(scratch);
__ push(scratch);
}
// isolate
- __ push(Immediate(reinterpret_cast<int>(isolate)));
+ __ push(Immediate(reinterpret_cast<int>(isolate())));
// holder
__ push(holder);
class StoreBufferOverflowStub: public PlatformCodeStub {
public:
- explicit StoreBufferOverflowStub(SaveFPRegsMode save_fp)
- : save_doubles_(save_fp) {
+ StoreBufferOverflowStub(Isolate* isolate, SaveFPRegsMode save_fp)
+ : PlatformCodeStub(isolate), save_doubles_(save_fp) {
ASSERT(CpuFeatures::IsSafeForSnapshot(SSE2) || save_fp == kDontSaveFPRegs);
}
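The header hunks all follow one mechanical template: prepend an Isolate* parameter, forward it to the PlatformCodeStub base, and leave the remaining member initializers untouched. Constructors that end up with a single argument gain `explicit` (SubStringStub, StringCompareStub below), while multi-argument ones drop it. A before/after sketch with a hypothetical flag member, reusing the stand-in base above:

enum SaveFPRegsModeSketch { kDontSaveFPRegsSketch, kSaveFPRegsSketch };

class FlagStubSketch : public PlatformCodeStub {  // hypothetical stub
 public:
  // was: explicit FlagStubSketch(SaveFPRegsModeSketch save_fp)
  //          : save_doubles_(save_fp) {}
  FlagStubSketch(Isolate* isolate, SaveFPRegsModeSketch save_fp)
      : PlatformCodeStub(isolate), save_doubles_(save_fp) {}
  SaveFPRegsModeSketch save_doubles() const { return save_doubles_; }
 private:
  SaveFPRegsModeSketch save_doubles_;
};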
class SubStringStub: public PlatformCodeStub {
public:
- SubStringStub() {}
+ explicit SubStringStub(Isolate* isolate) : PlatformCodeStub(isolate) {}
private:
Major MajorKey() { return SubString; }
class StringCompareStub: public PlatformCodeStub {
public:
- StringCompareStub() { }
+ explicit StringCompareStub(Isolate* isolate) : PlatformCodeStub(isolate) { }
// Compares two flat ASCII strings and returns result in eax.
static void GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
public:
enum LookupMode { POSITIVE_LOOKUP, NEGATIVE_LOOKUP };
- NameDictionaryLookupStub(Register dictionary,
+ NameDictionaryLookupStub(Isolate* isolate,
+ Register dictionary,
Register result,
Register index,
LookupMode mode)
- : dictionary_(dictionary), result_(result), index_(index), mode_(mode) { }
+ : PlatformCodeStub(isolate),
+ dictionary_(dictionary), result_(result), index_(index), mode_(mode) { }
void Generate(MacroAssembler* masm);
class RecordWriteStub: public PlatformCodeStub {
public:
- RecordWriteStub(Register object,
+ RecordWriteStub(Isolate* isolate,
+ Register object,
Register value,
Register address,
RememberedSetAction remembered_set_action,
SaveFPRegsMode fp_mode)
- : object_(object),
+ : PlatformCodeStub(isolate),
+ object_(object),
value_(value),
address_(address),
remembered_set_action_(remembered_set_action),
__ Move(eax, Immediate(0)); // No arguments.
__ mov(ebx, Immediate(ExternalReference::debug_break(masm->isolate())));
- CEntryStub ceb(1);
+ CEntryStub ceb(masm->isolate(), 1);
__ CallStub(&ceb);
// Automatically find register that could be used after register restore.
__ Push(info->scope()->GetScopeInfo());
__ CallRuntime(Runtime::kHiddenNewGlobalContext, 2);
} else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
- FastNewContextStub stub(heap_slots);
+ FastNewContextStub stub(isolate(), heap_slots);
__ CallStub(&stub);
} else {
__ push(edi);
} else {
type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
}
- ArgumentsAccessStub stub(type);
+ ArgumentsAccessStub stub(isolate(), type);
__ CallStub(&stub);
SetVar(arguments, eax, ebx, edx);
!pretenure &&
scope()->is_function_scope() &&
info->num_literals() == 0) {
- FastNewClosureStub stub(info->strict_mode(), info->is_generator());
+ FastNewClosureStub stub(isolate(),
+ info->strict_mode(),
+ info->is_generator());
__ mov(ebx, Immediate(info));
__ CallStub(&stub);
} else {
__ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
__ mov(ecx, Immediate(constant_properties));
__ mov(edx, Immediate(Smi::FromInt(flags)));
- FastCloneShallowObjectStub stub(properties_count);
+ FastCloneShallowObjectStub stub(isolate(), properties_count);
__ CallStub(&stub);
}
__ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
__ mov(ecx, Immediate(constant_elements));
FastCloneShallowArrayStub stub(
+ isolate(),
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
allocation_site_mode,
length);
__ mov(eax, FieldOperand(ebx, JSFunction::kLiteralsOffset));
__ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
__ mov(ecx, Immediate(constant_elements));
- FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
+ FastCloneShallowArrayStub stub(isolate(),
+ mode,
+ allocation_site_mode,
+ length);
__ CallStub(&stub);
}
} else {
// Store the subexpression value in the array's elements.
__ mov(ecx, Immediate(Smi::FromInt(i)));
- StoreArrayLiteralElementStub stub;
+ StoreArrayLiteralElementStub stub(isolate());
__ CallStub(&stub);
}
CallIC(ic, TypeFeedbackId::None());
__ mov(edi, eax);
__ mov(Operand(esp, 2 * kPointerSize), edi);
- CallFunctionStub stub(1, CALL_AS_METHOD);
+ CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
__ CallStub(&stub);
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
__ bind(&stub_call);
__ mov(eax, ecx);
- BinaryOpICStub stub(op, mode);
+ BinaryOpICStub stub(isolate(), op, mode);
CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
patch_site.EmitPatchInfo();
__ jmp(&done, Label::kNear);
Token::Value op,
OverwriteMode mode) {
__ pop(edx);
- BinaryOpICStub stub(op, mode);
+ BinaryOpICStub stub(isolate(), op, mode);
JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
patch_site.EmitPatchInfo();
// Record source position of the IC call.
SetSourcePosition(expr->position());
- CallFunctionStub stub(arg_count, flags);
+ CallFunctionStub stub(isolate(), arg_count, flags);
__ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
__ CallStub(&stub);
RecordJSReturnSite(expr);
// Record source position of the IC call.
SetSourcePosition(expr->position());
- CallFunctionStub stub(arg_count, CALL_AS_METHOD);
+ CallFunctionStub stub(isolate(), arg_count, CALL_AS_METHOD);
__ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
__ CallStub(&stub);
RecordJSReturnSite(expr);
__ mov(edx, Immediate(Smi::FromInt(expr->CallFeedbackSlot())));
// Record call targets in unoptimized code.
- CallFunctionStub stub(arg_count, RECORD_CALL_TARGET);
+ CallFunctionStub stub(isolate(), arg_count, RECORD_CALL_TARGET);
__ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
__ CallStub(&stub);
}
// Record source position for debugger.
SetSourcePosition(expr->position());
- CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS);
+ CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
__ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
__ CallStub(&stub);
RecordJSReturnSite(expr);
__ LoadHeapObject(ebx, FeedbackVector());
__ mov(edx, Immediate(Smi::FromInt(expr->CallNewFeedbackSlot())));
- CallConstructStub stub(RECORD_CALL_TARGET);
+ CallConstructStub stub(isolate(), RECORD_CALL_TARGET);
__ call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL);
PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
context()->Plug(eax);
VisitForAccumulatorValue(args->at(0));
__ mov(edx, eax);
__ Move(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));
- ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
+ ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
__ CallStub(&stub);
context()->Plug(eax);
}
void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
// Load the arguments on the stack and call the stub.
- SubStringStub stub;
+ SubStringStub stub(isolate());
ZoneList<Expression*>* args = expr->arguments();
ASSERT(args->length() == 3);
VisitForStackValue(args->at(0));
void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
// Load the arguments on the stack and call the stub.
- RegExpExecStub stub;
+ RegExpExecStub stub(isolate());
ZoneList<Expression*>* args = expr->arguments();
ASSERT(args->length() == 4);
VisitForStackValue(args->at(0));
VisitForStackValue(args->at(1));
if (CpuFeatures::IsSupported(SSE2)) {
- MathPowStub stub(MathPowStub::ON_STACK);
+ MathPowStub stub(isolate(), MathPowStub::ON_STACK);
__ CallStub(&stub);
} else {
__ CallRuntime(Runtime::kHiddenMathPowSlow, 2);
// Load the argument into eax and call the stub.
VisitForAccumulatorValue(args->at(0));
- NumberToStringStub stub;
+ NumberToStringStub stub(isolate());
__ CallStub(&stub);
context()->Plug(eax);
}
VisitForAccumulatorValue(args->at(1));
__ pop(edx);
- StringAddStub stub(STRING_ADD_CHECK_BOTH, NOT_TENURED);
+ StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
__ CallStub(&stub);
context()->Plug(eax);
}
VisitForStackValue(args->at(0));
VisitForStackValue(args->at(1));
- StringCompareStub stub;
+ StringCompareStub stub(isolate());
__ CallStub(&stub);
context()->Plug(eax);
}
void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
// Load the arguments on the stack and call the stub.
- RegExpConstructResultStub stub;
+ RegExpConstructResultStub stub(isolate());
ZoneList<Expression*>* args = expr->arguments();
ASSERT(args->length() == 3);
VisitForStackValue(args->at(0));
// Record source position of the IC call.
SetSourcePosition(expr->position());
- CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS);
+ CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
__ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
__ CallStub(&stub);
// Restore context register.
__ jmp(&stub_call, Label::kNear);
__ bind(&slow);
}
- ToNumberStub convert_stub;
+ ToNumberStub convert_stub(isolate());
__ CallStub(&convert_stub);
// Save result for postfix expressions.
__ bind(&stub_call);
__ mov(edx, eax);
__ mov(eax, Immediate(Smi::FromInt(1)));
- BinaryOpICStub stub(expr->binary_op(), NO_OVERWRITE);
+ BinaryOpICStub stub(isolate(), expr->binary_op(), NO_OVERWRITE);
CallIC(stub.GetCode(isolate()), expr->CountBinOpFeedbackId());
patch_site.EmitPatchInfo();
__ bind(&done);
case Token::INSTANCEOF: {
VisitForStackValue(expr->right());
- InstanceofStub stub(InstanceofStub::kNoFlags);
+ InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
__ CallStub(&stub);
PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
__ test(eax, eax);
Comment(";;; Allocate local context");
// Argument to NewContext is the function, which is still in edi.
if (heap_slots <= FastNewContextStub::kMaximumSlots) {
- FastNewContextStub stub(heap_slots);
+ FastNewContextStub stub(isolate(), heap_slots);
__ CallStub(&stub);
} else {
__ push(edi);
ASSERT(ToRegister(instr->result()).is(eax));
switch (instr->hydrogen()->major_key()) {
case CodeStub::RegExpExec: {
- RegExpExecStub stub;
+ RegExpExecStub stub(isolate());
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
break;
}
case CodeStub::SubString: {
- SubStringStub stub;
+ SubStringStub stub(isolate());
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
break;
}
case CodeStub::StringCompare: {
- StringCompareStub stub;
+ StringCompareStub stub(isolate());
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
break;
}
ASSERT(ToRegister(instr->right()).is(eax));
ASSERT(ToRegister(instr->result()).is(eax));
- BinaryOpICStub stub(instr->op(), NO_OVERWRITE);
+ BinaryOpICStub stub(isolate(), instr->op(), NO_OVERWRITE);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
}
void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
// Object and function are in fixed registers defined by the stub.
ASSERT(ToRegister(instr->context()).is(esi));
- InstanceofStub stub(InstanceofStub::kArgsInRegisters);
+ InstanceofStub stub(isolate(), InstanceofStub::kArgsInRegisters);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
Label true_value, done;
flags | InstanceofStub::kCallSiteInlineCheck);
flags = static_cast<InstanceofStub::Flags>(
flags | InstanceofStub::kReturnTrueFalseObject);
- InstanceofStub stub(flags);
+ InstanceofStub stub(isolate(), flags);
// Get the temp register reserved by the instruction. This needs to be a
// register which is pushed last by PushSafepointRegisters as top of the
ASSERT(ToDoubleRegister(instr->result()).is(xmm3));
if (exponent_type.IsSmi()) {
- MathPowStub stub(MathPowStub::TAGGED);
+ MathPowStub stub(isolate(), MathPowStub::TAGGED);
__ CallStub(&stub);
} else if (exponent_type.IsTagged()) {
Label no_deopt;
__ CmpObjectType(eax, HEAP_NUMBER_TYPE, ecx);
DeoptimizeIf(not_equal, instr->environment());
__ bind(&no_deopt);
- MathPowStub stub(MathPowStub::TAGGED);
+ MathPowStub stub(isolate(), MathPowStub::TAGGED);
__ CallStub(&stub);
} else if (exponent_type.IsInteger32()) {
- MathPowStub stub(MathPowStub::INTEGER);
+ MathPowStub stub(isolate(), MathPowStub::INTEGER);
__ CallStub(&stub);
} else {
ASSERT(exponent_type.IsDouble());
- MathPowStub stub(MathPowStub::DOUBLE);
+ MathPowStub stub(isolate(), MathPowStub::DOUBLE);
__ CallStub(&stub);
}
}
ASSERT(ToRegister(instr->result()).is(eax));
int arity = instr->arity();
- CallFunctionStub stub(arity, instr->hydrogen()->function_flags());
+ CallFunctionStub stub(isolate(), arity, instr->hydrogen()->function_flags());
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
}
// No cell in ebx for construct type feedback in optimized code
__ mov(ebx, isolate()->factory()->undefined_value());
- CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
+ CallConstructStub stub(isolate(), NO_CALL_FUNCTION_FLAGS);
__ Move(eax, Immediate(instr->arity()));
CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
}
: DONT_OVERRIDE;
if (instr->arity() == 0) {
- ArrayNoArgumentConstructorStub stub(kind, override_mode);
+ ArrayNoArgumentConstructorStub stub(isolate(), kind, override_mode);
CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
} else if (instr->arity() == 1) {
Label done;
__ j(zero, &packed_case, Label::kNear);
ElementsKind holey_kind = GetHoleyElementsKind(kind);
- ArraySingleArgumentConstructorStub stub(holey_kind, override_mode);
+ ArraySingleArgumentConstructorStub stub(isolate(),
+ holey_kind,
+ override_mode);
CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
__ jmp(&done, Label::kNear);
__ bind(&packed_case);
}
- ArraySingleArgumentConstructorStub stub(kind, override_mode);
+ ArraySingleArgumentConstructorStub stub(isolate(), kind, override_mode);
CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
__ bind(&done);
} else {
- ArrayNArgumentsConstructorStub stub(kind, override_mode);
+ ArrayNArgumentsConstructorStub stub(isolate(), kind, override_mode);
CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
}
}
PushSafepointRegistersScope scope(this);
__ mov(ebx, to_map);
bool is_js_array = from_map->instance_type() == JS_ARRAY_TYPE;
- TransitionElementsKindStub stub(from_kind, to_kind, is_js_array);
+ TransitionElementsKindStub stub(isolate(), from_kind, to_kind, is_js_array);
__ CallStub(&stub);
RecordSafepointWithLazyDeopt(instr,
RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
ASSERT(ToRegister(instr->context()).is(esi));
ASSERT(ToRegister(instr->left()).is(edx));
ASSERT(ToRegister(instr->right()).is(eax));
- StringAddStub stub(instr->hydrogen()->flags(),
+ StringAddStub stub(isolate(),
+ instr->hydrogen()->flags(),
instr->hydrogen()->pretenure_flag());
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
}
// space for nested functions that don't need literals cloning.
bool pretenure = instr->hydrogen()->pretenure();
if (!pretenure && instr->hydrogen()->has_no_literals()) {
- FastNewClosureStub stub(instr->hydrogen()->strict_mode(),
+ FastNewClosureStub stub(isolate(),
+ instr->hydrogen()->strict_mode(),
instr->hydrogen()->is_generator());
__ mov(ebx, Immediate(instr->hydrogen()->shared_info()));
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
j(equal, &done, Label::kNear);
}
StoreBufferOverflowStub store_buffer_overflow =
- StoreBufferOverflowStub(save_fp);
+ StoreBufferOverflowStub(isolate(), save_fp);
CallStub(&store_buffer_overflow);
if (and_then == kReturnAtEnd) {
ret(0);
void MacroAssembler::SlowTruncateToI(Register result_reg,
Register input_reg,
int offset) {
- DoubleToIStub stub(input_reg, result_reg, offset, true);
+ DoubleToIStub stub(isolate(), input_reg, result_reg, offset, true);
call(stub.GetCode(isolate()), RelocInfo::CODE_TARGET);
}
// them.
lea(address, FieldOperand(object, HeapObject::kMapOffset));
mov(value, Immediate(map));
- RecordWriteStub stub(object, value, address, OMIT_REMEMBERED_SET, save_fp);
+ RecordWriteStub stub(isolate(), object, value, address, OMIT_REMEMBERED_SET,
+ save_fp);
CallStub(&stub);
bind(&done);
&done,
Label::kNear);
- RecordWriteStub stub(object, value, address, remembered_set_action, fp_mode);
+ RecordWriteStub stub(isolate(), object, value, address, remembered_set_action,
+ fp_mode);
CallStub(&stub);
bind(&done);
void MacroAssembler::DebugBreak() {
Move(eax, Immediate(0));
mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak, isolate())));
- CEntryStub ces(1);
+ CEntryStub ces(isolate(), 1);
call(ces.GetCode(isolate()), RelocInfo::DEBUG_BREAK);
}
#endif
// smarter.
Move(eax, Immediate(num_arguments));
mov(ebx, Immediate(ExternalReference(f, isolate())));
- CEntryStub ces(1, CpuFeatures::IsSupported(SSE2) ? save_doubles
- : kDontSaveFPRegs);
+ CEntryStub ces(isolate(),
+ 1,
+ CpuFeatures::IsSupported(SSE2) ? save_doubles
+ : kDontSaveFPRegs);
CallStub(&ces);
}
mov(eax, Immediate(num_arguments));
mov(ebx, Immediate(ref));
- CEntryStub stub(1);
+ CEntryStub stub(isolate(), 1);
CallStub(&stub);
}
void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
// Set the entry point and jump to the C entry runtime stub.
mov(ebx, Immediate(ext));
- CEntryStub ces(1);
+ CEntryStub ces(isolate(), 1);
jmp(ces.GetCode(isolate()), RelocInfo::CODE_TARGET);
}
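In the MacroAssembler hunks (DebugBreak, the CallRuntime helpers, JumpToExternalReference) the bare isolate() resolves to the assembler's own long-standing accessor, not to a stub member; note the unchanged `ExternalReference(Runtime::kDebugBreak, isolate())` line above, which already used it. A sketch of that second source of the same name, again on the stand-in types:

// Inside a MacroAssembler member function the unqualified isolate()
// call is MacroAssembler::isolate(); a sketch of DebugBreak's shape.
class MacroAssemblerWithDebugBreak : public MacroAssembler {
 public:
  using MacroAssembler::MacroAssembler;
  void DebugBreak() {
    ExampleStub ces(isolate());  // stands in for CEntryStub(isolate(), 1)
    (void)ces;
  }
};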
__ mov(api_function_address, Immediate(function_address));
// Jump to stub.
- CallApiFunctionStub stub(is_store, call_data_undefined, argc);
+ CallApiFunctionStub stub(isolate, is_store, call_data_undefined, argc);
__ TailCallStub(&stub);
}
Representation representation) {
if (!reg.is(receiver())) __ mov(receiver(), reg);
if (kind() == Code::LOAD_IC) {
- LoadFieldStub stub(field.is_inobject(holder),
+ LoadFieldStub stub(isolate(),
+ field.is_inobject(holder),
field.translate(holder),
representation);
GenerateTailCall(masm(), stub.GetCode(isolate()));
} else {
- KeyedLoadFieldStub stub(field.is_inobject(holder),
+ KeyedLoadFieldStub stub(isolate(),
+ field.is_inobject(holder),
field.translate(holder),
representation);
GenerateTailCall(masm(), stub.GetCode(isolate()));
Address function_address = v8::ToCData<Address>(callback->getter());
__ mov(getter_address, Immediate(function_address));
- CallApiGetterStub stub;
+ CallApiGetterStub stub(isolate());
__ TailCallStub(&stub);
}
if (state() == UNINITIALIZED) {
stub = pre_monomorphic_stub();
} else if (state() == PREMONOMORPHIC) {
- FunctionPrototypeStub function_prototype_stub(kind());
+ FunctionPrototypeStub function_prototype_stub(isolate(), kind());
stub = function_prototype_stub.GetCode(isolate());
} else if (state() != MEGAMORPHIC) {
ASSERT(state() != GENERIC);
bool inobject,
Representation representation) {
if (kind() == Code::LOAD_IC) {
- LoadFieldStub stub(inobject, offset, representation);
+ LoadFieldStub stub(isolate(), inobject, offset, representation);
return stub.GetCode(isolate());
} else {
- KeyedLoadFieldStub stub(inobject, offset, representation);
+ KeyedLoadFieldStub stub(isolate(), inobject, offset, representation);
return stub.GetCode(isolate());
}
}
if (object->IsStringWrapper() &&
String::Equals(isolate()->factory()->length_string(), name)) {
if (kind() == Code::LOAD_IC) {
- StringLengthStub string_length_stub;
+ StringLengthStub string_length_stub(isolate());
return string_length_stub.GetCode(isolate());
} else {
- KeyedStringLengthStub string_length_stub;
+ KeyedStringLengthStub string_length_stub(isolate());
return string_length_stub.GetCode(isolate());
}
}
Handle<PropertyCell> cell(global->GetPropertyCell(lookup), isolate());
Handle<HeapType> union_type = PropertyCell::UpdatedType(cell, value);
StoreGlobalStub stub(
- union_type->IsConstant(), receiver->IsJSGlobalProxy());
+ isolate(), union_type->IsConstant(), receiver->IsJSGlobalProxy());
Handle<Code> code = stub.GetCodeCopyFromTemplate(
isolate(), global, cell);
// TODO(verwaest): Move caching of these NORMAL stubs outside as well.
}
// Install the stub with an allocation site.
- BinaryOpICWithAllocationSiteStub stub(state);
+ BinaryOpICWithAllocationSiteStub stub(isolate(), state);
target = stub.GetCodeCopyFromTemplate(isolate(), allocation_site);
// Sanity check the trampoline stub.
ASSERT_EQ(*allocation_site, target->FindFirstAllocationSite());
} else {
// Install the generic stub.
- BinaryOpICStub stub(state);
+ BinaryOpICStub stub(isolate(), state);
target = stub.GetCode(isolate());
// Sanity check the generic stub.
Code* CompareIC::GetRawUninitialized(Isolate* isolate, Token::Value op) {
- ICCompareStub stub(op, UNINITIALIZED, UNINITIALIZED, UNINITIALIZED);
+ ICCompareStub stub(isolate, op, UNINITIALIZED, UNINITIALIZED, UNINITIALIZED);
Code* code = NULL;
CHECK(stub.FindCodeInCache(&code, isolate));
return code;
Handle<Code> CompareIC::GetUninitialized(Isolate* isolate, Token::Value op) {
- ICCompareStub stub(op, UNINITIALIZED, UNINITIALIZED, UNINITIALIZED);
+ ICCompareStub stub(isolate, op, UNINITIALIZED, UNINITIALIZED, UNINITIALIZED);
return stub.GetCode(isolate);
}
State new_right = NewInputState(previous_right, y);
State state = TargetState(previous_state, previous_left, previous_right,
HasInlinedSmiCode(address()), x, y);
- ICCompareStub stub(op_, new_left, new_right, state);
+ ICCompareStub stub(isolate(), op_, new_left, new_right, state);
if (state == KNOWN_OBJECT) {
stub.set_known_map(
Handle<Map>(Handle<JSObject>::cast(x)->map(), isolate()));
if (IsCleared(target)) return;
ExtraICState state = target->extra_ic_state();
- CompareNilICStub stub(state, HydrogenCodeStub::UNINITIALIZED);
+ CompareNilICStub stub(target->GetIsolate(),
+ state,
+ HydrogenCodeStub::UNINITIALIZED);
stub.ClearState();
Code* code = NULL;
Handle<Object> CompareNilIC::CompareNil(Handle<Object> object) {
ExtraICState extra_ic_state = target()->extra_ic_state();
- CompareNilICStub stub(extra_ic_state);
+ CompareNilICStub stub(isolate(), extra_ic_state);
// Extract the current supported types from the patched IC and calculate what
// types must be supported as a result of the miss.
Handle<Object> ToBooleanIC::ToBoolean(Handle<Object> object) {
- ToBooleanStub stub(target()->extra_ic_state());
+ ToBooleanStub stub(isolate(), target()->extra_ic_state());
bool to_boolean_value = stub.UpdateStatus(object);
Handle<Code> code = stub.GetCode(isolate());
set_target(*code);
receiver_map->has_external_array_elements() ||
receiver_map->has_fixed_typed_array_elements()) {
Handle<Code> stub = KeyedLoadFastElementStub(
+ isolate(),
receiver_map->instance_type() == JS_ARRAY_TYPE,
elements_kind).GetCode(isolate());
__ DispatchMap(receiver(), scratch1(), receiver_map, stub, DO_SMI_CHECK);
} else {
Handle<Code> stub = FLAG_compiled_keyed_dictionary_loads
- ? KeyedLoadDictionaryElementStub().GetCode(isolate())
- : KeyedLoadDictionaryElementPlatformStub().GetCode(isolate());
+ ? KeyedLoadDictionaryElementStub(isolate()).GetCode(isolate())
+ : KeyedLoadDictionaryElementPlatformStub(isolate()).GetCode(isolate());
__ DispatchMap(receiver(), scratch1(), receiver_map, stub, DO_SMI_CHECK);
}
receiver_map->has_external_array_elements() ||
receiver_map->has_fixed_typed_array_elements()) {
stub = KeyedStoreFastElementStub(
+ isolate(),
is_jsarray,
elements_kind,
store_mode()).GetCode(isolate());
} else {
- stub = KeyedStoreElementStub(is_jsarray,
+ stub = KeyedStoreElementStub(isolate(),
+ is_jsarray,
elements_kind,
store_mode()).GetCode(isolate());
}
IsExternalArrayElementsKind(elements_kind) ||
IsFixedTypedArrayElementsKind(elements_kind)) {
cached_stub =
- KeyedLoadFastElementStub(is_js_array,
+ KeyedLoadFastElementStub(isolate(),
+ is_js_array,
elements_kind).GetCode(isolate());
} else if (elements_kind == SLOPPY_ARGUMENTS_ELEMENTS) {
cached_stub = isolate()->builtins()->KeyedLoadIC_SloppyArguments();
} else {
ASSERT(elements_kind == DICTIONARY_ELEMENTS);
- cached_stub = KeyedLoadDictionaryElementStub().GetCode(isolate());
+ cached_stub =
+ KeyedLoadDictionaryElementStub(isolate()).GetCode(isolate());
}
}
ElementsKind elements_kind = receiver_map->elements_kind();
if (!transitioned_map.is_null()) {
cached_stub = ElementsTransitionAndStoreStub(
+ isolate(),
elements_kind,
transitioned_map->elements_kind(),
is_js_array,
receiver_map->has_external_array_elements() ||
receiver_map->has_fixed_typed_array_elements()) {
cached_stub = KeyedStoreFastElementStub(
+ isolate(),
is_js_array,
elements_kind,
store_mode()).GetCode(isolate());
} else {
cached_stub = KeyedStoreElementStub(
+ isolate(),
is_js_array,
elements_kind,
store_mode()).GetCode(isolate());
CompareIC::StubInfoToType(
stub_minor_key, left_type, right_type, combined_type, map, zone());
} else if (code->is_compare_nil_ic_stub()) {
- CompareNilICStub stub(code->extra_ic_state());
+ CompareNilICStub stub(isolate(), code->extra_ic_state());
*combined_type = stub.GetType(zone(), map);
*left_type = *right_type = stub.GetInputType(zone(), map);
}
TypeFeedbackId id, Handle<String> name,
SmallMapList* receiver_types, bool* is_prototype) {
receiver_types->Clear();
- FunctionPrototypeStub proto_stub(Code::LOAD_IC);
+ FunctionPrototypeStub proto_stub(isolate(), Code::LOAD_IC);
*is_prototype = LoadIsStub(id, &proto_stub);
if (!*is_prototype) {
Code::Flags flags = Code::ComputeHandlerFlags(Code::LOAD_IC);
// No type feedback cell is available
__ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
// Expects rdi to hold function pointer.
- CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
+ CallConstructStub stub(masm->isolate(), NO_CALL_FUNCTION_FLAGS);
__ CallStub(&stub);
} else {
ParameterCount actual(rax);
void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) {
// Update the static counter each time a new code stub is generated.
- Isolate* isolate = masm->isolate();
- isolate->counters()->code_stubs()->Increment();
+ isolate()->counters()->code_stubs()->Increment();
- CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate);
+ CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate());
int param_count = descriptor->register_param_count_;
{
// Call the runtime system in a fresh internal frame.
const int argument_count = 1;
__ PrepareCallCFunction(argument_count);
__ LoadAddress(arg_reg_1,
- ExternalReference::isolate_address(masm->isolate()));
+ ExternalReference::isolate_address(isolate()));
AllowExternalCallThatCantCauseGC scope(masm);
__ CallCFunction(
- ExternalReference::store_buffer_overflow_function(masm->isolate()),
+ ExternalReference::store_buffer_overflow_function(isolate()),
argument_count);
__ PopCallerSaved(save_doubles_);
__ ret(0);
__ Cvtlsi2sd(double_exponent, exponent);
// Returning or bailing out.
- Counters* counters = masm->isolate()->counters();
+ Counters* counters = isolate()->counters();
if (exponent_type_ == ON_STACK) {
// The arguments are still on the stack.
__ bind(&call_runtime);
AllowExternalCallThatCantCauseGC scope(masm);
__ PrepareCallCFunction(2);
__ CallCFunction(
- ExternalReference::power_double_double_function(masm->isolate()), 2);
+ ExternalReference::power_double_double_function(isolate()), 2);
}
// Return value is in xmm0.
__ movsd(double_result, xmm0);
// -- rdx : receiver
// -- rsp[0] : return address
// -----------------------------------
- __ Cmp(rax, masm->isolate()->factory()->prototype_string());
+ __ Cmp(rax, isolate()->factory()->prototype_string());
__ j(not_equal, &miss);
receiver = rdx;
} else {
// rbx: the mapped parameter count (untagged)
// rax: the allocated object (tagged).
- Factory* factory = masm->isolate()->factory();
+ Factory* factory = isolate()->factory();
StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER);
__ SmiToInteger64(rbx, args.GetArgumentOperand(2));
ARGUMENTS_DONT_CONTAIN_RECEIVER);
Label runtime;
// Ensure that a RegExp stack is allocated.
- Isolate* isolate = masm->isolate();
ExternalReference address_of_regexp_stack_memory_address =
- ExternalReference::address_of_regexp_stack_memory_address(isolate);
+ ExternalReference::address_of_regexp_stack_memory_address(isolate());
ExternalReference address_of_regexp_stack_memory_size =
- ExternalReference::address_of_regexp_stack_memory_size(isolate);
+ ExternalReference::address_of_regexp_stack_memory_size(isolate());
__ Load(kScratchRegister, address_of_regexp_stack_memory_size);
__ testp(kScratchRegister, kScratchRegister);
__ j(zero, &runtime);
// rcx: encoding of subject string (1 if ASCII, 0 if two_byte);
// r11: code
// All checks done. Now push arguments for native regexp code.
- Counters* counters = masm->isolate()->counters();
+ Counters* counters = isolate()->counters();
__ IncrementCounter(counters->regexp_entry_native(), 1);
// Isolates: note we add an additional parameter here (isolate pointer).
// Argument 9: Pass current isolate address.
__ LoadAddress(kScratchRegister,
- ExternalReference::isolate_address(masm->isolate()));
+ ExternalReference::isolate_address(isolate()));
__ movq(Operand(rsp, (argument_slots_on_stack - 1) * kRegisterSize),
kScratchRegister);
#endif
// Argument 5: static offsets vector buffer.
- __ LoadAddress(r8,
- ExternalReference::address_of_static_offsets_vector(isolate));
+ __ LoadAddress(
+ r8, ExternalReference::address_of_static_offsets_vector(isolate()));
// Argument 5 passed in r8 on Linux and on the stack on Windows.
#ifdef _WIN64
__ movq(Operand(rsp, (argument_slots_on_stack - 5) * kRegisterSize), r8);
kDontSaveFPRegs);
// Get the static offsets vector filled by the native regexp code.
- __ LoadAddress(rcx,
- ExternalReference::address_of_static_offsets_vector(isolate));
+ __ LoadAddress(
+ rcx, ExternalReference::address_of_static_offsets_vector(isolate()));
// rbx: last_match_info backing store (FixedArray)
// rcx: offsets vector
// haven't created the exception yet. Handle that in the runtime system.
// TODO(592): Rerunning the RegExp to get the stack overflow exception.
ExternalReference pending_exception_address(
- Isolate::kPendingExceptionAddress, isolate);
+ Isolate::kPendingExceptionAddress, isolate());
Operand pending_exception_operand =
masm->ExternalOperand(pending_exception_address, rbx);
__ movp(rax, pending_exception_operand);
void ICCompareStub::GenerateGeneric(MacroAssembler* masm) {
Label check_unequal_objects, done;
Condition cc = GetCondition();
- Factory* factory = masm->isolate()->factory();
+ Factory* factory = isolate()->factory();
Label miss;
CheckInputType(masm, rdx, left_, &miss);
__ Push(rdx);
__ Push(rbx);
- CreateAllocationSiteStub create_stub;
+ CreateAllocationSiteStub create_stub(isolate);
__ CallStub(&create_stub);
__ Pop(rbx);
// rdx : (only if rbx is not the megamorphic symbol) slot in feedback
// vector (Smi)
// rdi : the function to call
- Isolate* isolate = masm->isolate();
Label slow, non_function, wrap, cont;
StackArgumentsAccessor args(rsp, argc_);
__ SmiToInteger32(rdx, rdx);
__ Move(FieldOperand(rbx, rdx, times_pointer_size,
FixedArray::kHeaderSize),
- TypeFeedbackInfo::MegamorphicSentinel(isolate));
+ TypeFeedbackInfo::MegamorphicSentinel(isolate()));
__ Integer32ToSmi(rdx, rdx);
}
// Check for function proxy.
__ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
{
Handle<Code> adaptor =
- masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
+ isolate()->builtins()->ArgumentsAdaptorTrampoline();
__ jmp(adaptor, RelocInfo::CODE_TARGET);
}
__ Set(rbx, 0);
__ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
Handle<Code> adaptor =
- isolate->builtins()->ArgumentsAdaptorTrampoline();
+ isolate()->builtins()->ArgumentsAdaptorTrampoline();
__ Jump(adaptor, RelocInfo::CODE_TARGET);
}
__ bind(&do_call);
// Set expected number of arguments to zero (not changing rax).
__ Set(rbx, 0);
- __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
+ __ Jump(isolate()->builtins()->ArgumentsAdaptorTrampoline(),
RelocInfo::CODE_TARGET);
}
void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
- CEntryStub stub(1, kDontSaveFPRegs);
+ CEntryStub stub(isolate, 1, kDontSaveFPRegs);
stub.GetCode(isolate);
- CEntryStub save_doubles(1, kSaveFPRegs);
+ CEntryStub save_doubles(isolate, 1, kSaveFPRegs);
save_doubles.GetCode(isolate);
}
// Return result in single register (rax).
__ movp(rcx, r14); // argc.
__ movp(rdx, r15); // argv.
- __ Move(r8, ExternalReference::isolate_address(masm->isolate()));
+ __ Move(r8, ExternalReference::isolate_address(isolate()));
} else {
ASSERT_EQ(2, result_size_);
// Pass a pointer to the result location as the first argument.
// Pass a pointer to the Arguments object as the second argument.
__ movp(rdx, r14); // argc.
__ movp(r8, r15); // argv.
- __ Move(r9, ExternalReference::isolate_address(masm->isolate()));
+ __ Move(r9, ExternalReference::isolate_address(isolate()));
}
#else // _WIN64
// GCC passes arguments in rdi, rsi, rdx, rcx, r8, r9.
__ movp(rdi, r14); // argc.
__ movp(rsi, r15); // argv.
- __ Move(rdx, ExternalReference::isolate_address(masm->isolate()));
+ __ Move(rdx, ExternalReference::isolate_address(isolate()));
#endif
__ call(rbx);
// Result is in rax - do not destroy this register!
__ j(equal, &exception_returned);
ExternalReference pending_exception_address(
- Isolate::kPendingExceptionAddress, masm->isolate());
+ Isolate::kPendingExceptionAddress, isolate());
// Check that there is no pending exception, otherwise we
// should have returned the exception sentinel.
__ InitializeRootRegister();
}
- Isolate* isolate = masm->isolate();
-
// Save copies of the top frame descriptor on the stack.
- ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate);
+ ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate());
{
Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
__ Push(c_entry_fp_operand);
}
// If this is the outermost JS call, set js_entry_sp value.
- ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate);
+ ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate());
__ Load(rax, js_entry_sp);
__ testp(rax, rax);
__ j(not_zero, &not_outermost_js);

// Caught exception: Store result (exception) in the pending exception
// field in the JSEnv and return a failure sentinel.
ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
- isolate);
+ isolate());
__ Store(pending_exception, rax);
__ LoadRoot(rax, Heap::kExceptionRootIndex);
__ jmp(&exit);
// at the time this code is generated.
if (is_construct) {
ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
- isolate);
+ isolate());
__ Load(rax, construct_entry);
} else {
- ExternalReference entry(Builtins::kJSEntryTrampoline, isolate);
+ ExternalReference entry(Builtins::kJSEntryTrampoline, isolate());
__ Load(rax, entry);
}
__ leap(kScratchRegister, FieldOperand(rax, Code::kHeaderSize));
// Longer than original string's length or negative: unsafe arguments.
__ j(above, &runtime);
// Return original string.
- Counters* counters = masm->isolate()->counters();
+ Counters* counters = isolate()->counters();
__ IncrementCounter(counters->sub_string_native(), 1);
__ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
__ bind(&not_original_string);
__ cmpp(rdx, rax);
__ j(not_equal, &not_same, Label::kNear);
__ Move(rax, Smi::FromInt(EQUAL));
- Counters* counters = masm->isolate()->counters();
+ Counters* counters = isolate()->counters();
__ IncrementCounter(counters->string_compare_native(), 1);
__ ret(2 * kPointerSize);
// -- rax : right
// -- rsp[0] : return address
// -----------------------------------
- Isolate* isolate = masm->isolate();
// Load rcx with the allocation site. We stick an undefined dummy value here
// and replace it with the real allocation site later when we instantiate this
// stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
- __ Move(rcx, handle(isolate->heap()->undefined_value()));
+ __ Move(rcx, handle(isolate()->heap()->undefined_value()));
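+ // (Loading the dummy through a Handle embeds a movable heap-object
+ // pointer in the instruction stream; that embedded slot is presumably
+ // what GetCodeCopyFromTemplate() locates and patches with the real
+ // allocation site.)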
// Make sure that we actually patched the allocation site.
if (FLAG_debug_code) {
__ testb(rcx, Immediate(kSmiTagMask));
__ Assert(not_equal, kExpectedAllocationSite);
__ Cmp(FieldOperand(rcx, HeapObject::kMapOffset),
- isolate->factory()->allocation_site_map());
+ isolate()->factory()->allocation_site_map());
__ Assert(equal, kExpectedAllocationSite);
}
// Tail call into the stub that handles binary operations with allocation
// sites.
- BinaryOpWithAllocationSiteStub stub(state_);
+ BinaryOpWithAllocationSiteStub stub(isolate(), state_);
__ TailCallStub(&stub);
}
// Load left and right operand.
Label done, left, left_smi, right_smi;
__ JumpIfSmi(rax, &right_smi, Label::kNear);
- __ CompareMap(rax, masm->isolate()->factory()->heap_number_map());
+ __ CompareMap(rax, isolate()->factory()->heap_number_map());
__ j(not_equal, &maybe_undefined1, Label::kNear);
__ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
__ jmp(&left, Label::kNear);
__ bind(&left);
__ JumpIfSmi(rdx, &left_smi, Label::kNear);
- __ CompareMap(rdx, masm->isolate()->factory()->heap_number_map());
+ __ CompareMap(rdx, isolate()->factory()->heap_number_map());
__ j(not_equal, &maybe_undefined2, Label::kNear);
__ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
__ jmp(&done);
__ bind(&unordered);
__ bind(&generic_stub);
- ICCompareStub stub(op_, CompareIC::GENERIC, CompareIC::GENERIC,
+ ICCompareStub stub(isolate(), op_, CompareIC::GENERIC, CompareIC::GENERIC,
CompareIC::GENERIC);
- __ jmp(stub.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
+ __ jmp(stub.GetCode(isolate()), RelocInfo::CODE_TARGET);
__ bind(&maybe_undefined1);
if (Token::IsOrderedRelationalCompareOp(op_)) {
- __ Cmp(rax, masm->isolate()->factory()->undefined_value());
+ __ Cmp(rax, isolate()->factory()->undefined_value());
__ j(not_equal, &miss);
__ JumpIfSmi(rdx, &unordered);
__ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx);
__ bind(&maybe_undefined2);
if (Token::IsOrderedRelationalCompareOp(op_)) {
- __ Cmp(rdx, masm->isolate()->factory()->undefined_value());
+ __ Cmp(rdx, isolate()->factory()->undefined_value());
__ j(equal, &unordered);
}
{
// Call the runtime system in a fresh internal frame.
ExternalReference miss =
- ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate());
+ ExternalReference(IC_Utility(IC::kCompareIC_Miss), isolate());
FrameScope scope(masm, StackFrame::INTERNAL);
__ Push(rdx);
__ bind(&good);
}
- NameDictionaryLookupStub stub(properties, r0, r0, NEGATIVE_LOOKUP);
+ NameDictionaryLookupStub stub(masm->isolate(), properties, r0, r0,
+ NEGATIVE_LOOKUP);
__ Push(Handle<Object>(name));
__ Push(Immediate(name->Hash()));
__ CallStub(&stub);
__ j(equal, done);
}
- NameDictionaryLookupStub stub(elements, r0, r1, POSITIVE_LOOKUP);
+ NameDictionaryLookupStub stub(masm->isolate(), elements, r0, r1,
+ POSITIVE_LOOKUP);
__ Push(name);
__ movl(r0, FieldOperand(name, Name::kHashFieldOffset));
__ shrl(r0, Immediate(Name::kHashShift));
times_pointer_size,
kElementsStartOffset - kHeapObjectTag));
- __ Cmp(scratch, masm->isolate()->factory()->undefined_value());
+ __ Cmp(scratch, isolate()->factory()->undefined_value());
__ j(equal, &not_in_dictionary);
// Stop if found the property.
void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
Isolate* isolate) {
- StoreBufferOverflowStub stub1(kDontSaveFPRegs);
+ StoreBufferOverflowStub stub1(isolate, kDontSaveFPRegs);
stub1.GetCode(isolate);
- StoreBufferOverflowStub stub2(kSaveFPRegs);
+ StoreBufferOverflowStub stub2(isolate, kSaveFPRegs);
stub2.GetCode(isolate);
}
// TODO(gc) Can we just set address arg2 in the beginning?
__ Move(arg_reg_2, address);
__ LoadAddress(arg_reg_3,
- ExternalReference::isolate_address(masm->isolate()));
+ ExternalReference::isolate_address(isolate()));
int argument_count = 3;
AllowExternalCallThatCantCauseGC scope(masm);
__ PrepareCallCFunction(argument_count);
__ CallCFunction(
- ExternalReference::incremental_marking_record_write_function(
- masm->isolate()),
+ ExternalReference::incremental_marking_record_write_function(isolate()),
argument_count);
regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_);
}
void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
- CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
- __ Call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
+ CEntryStub ces(isolate(), 1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
+ __ Call(ces.GetCode(isolate()), RelocInfo::CODE_TARGET);
int parameter_count_offset =
StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
__ movp(rbx, MemOperand(rbp, parameter_count_offset));
void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
if (masm->isolate()->function_entry_hook() != NULL) {
- ProfileEntryHookStub stub;
+ ProfileEntryHookStub stub(masm->isolate());
masm->CallStub(&stub);
}
}
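This hunk shows the dividing line the whole patch follows: MaybeCallEntryHook is static, so it has no stub instance and still reaches the isolate through masm, while Generate just below is a member and uses the new accessor. A sketch with hypothetical names (SomeHelper, SomeStub are stand-ins, not code from the patch):

// Hypothetical illustration of the convention this patch applies:
// static helpers go through the MacroAssembler, members through |this|.
static void SomeHelper(MacroAssembler* masm) {
  SomeStub stub(masm->isolate());  // no stub instance in scope
  masm->CallStub(&stub);
}

void SomeStub::Generate(MacroAssembler* masm) {
  Counters* counters = isolate()->counters();  // stub member: use accessor
}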
masm->PushCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2);
// Call the entry hook function.
- __ Move(rax, FUNCTION_ADDR(masm->isolate()->function_entry_hook()),
+ __ Move(rax, FUNCTION_ADDR(isolate()->function_entry_hook()),
Assembler::RelocInfoNone());
AllowExternalCallThatCantCauseGC scope(masm);
static void CreateArrayDispatch(MacroAssembler* masm,
AllocationSiteOverrideMode mode) {
if (mode == DISABLE_ALLOCATION_SITES) {
- T stub(GetInitialFastElementsKind(), mode);
+ T stub(masm->isolate(), GetInitialFastElementsKind(), mode);
__ TailCallStub(&stub);
} else if (mode == DONT_OVERRIDE) {
int last_index = GetSequenceIndexFromFastElementsKind(
ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
__ cmpl(rdx, Immediate(kind));
__ j(not_equal, &next);
- T stub(kind);
+ T stub(masm->isolate(), kind);
__ TailCallStub(&stub);
__ bind(&next);
}
ElementsKind initial = GetInitialFastElementsKind();
ElementsKind holey_initial = GetHoleyElementsKind(initial);
- ArraySingleArgumentConstructorStub stub_holey(holey_initial,
+ ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
+ holey_initial,
DISABLE_ALLOCATION_SITES);
__ TailCallStub(&stub_holey);
__ bind(&normal_sequence);
- ArraySingleArgumentConstructorStub stub(initial,
+ ArraySingleArgumentConstructorStub stub(masm->isolate(),
+ initial,
DISABLE_ALLOCATION_SITES);
__ TailCallStub(&stub);
} else if (mode == DONT_OVERRIDE) {
ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
__ cmpl(rdx, Immediate(kind));
__ j(not_equal, &next);
- ArraySingleArgumentConstructorStub stub(kind);
+ ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
__ TailCallStub(&stub);
__ bind(&next);
}
TERMINAL_FAST_ELEMENTS_KIND);
for (int i = 0; i <= to_index; ++i) {
ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
- T stub(kind);
+ T stub(isolate, kind);
stub.GetCode(isolate);
if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) {
- T stub1(kind, DISABLE_ALLOCATION_SITES);
+ T stub1(isolate, kind, DISABLE_ALLOCATION_SITES);
stub1.GetCode(isolate);
}
}
ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
for (int i = 0; i < 2; i++) {
// For internal arrays we only need a few things
- InternalArrayNoArgumentConstructorStub stubh1(kinds[i]);
+ InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]);
stubh1.GetCode(isolate);
- InternalArraySingleArgumentConstructorStub stubh2(kinds[i]);
+ InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
stubh2.GetCode(isolate);
- InternalArrayNArgumentsConstructorStub stubh3(kinds[i]);
+ InternalArrayNArgumentsConstructorStub stubh3(isolate, kinds[i]);
stubh3.GetCode(isolate);
}
}
__ testp(rax, rax);
__ j(not_zero, &not_zero_case);
- InternalArrayNoArgumentConstructorStub stub0(kind);
+ InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
__ TailCallStub(&stub0);
__ bind(&not_zero_case);
__ j(zero, &normal_sequence);
InternalArraySingleArgumentConstructorStub
- stub1_holey(GetHoleyElementsKind(kind));
+ stub1_holey(isolate(), GetHoleyElementsKind(kind));
__ TailCallStub(&stub1_holey);
}
__ bind(&normal_sequence);
- InternalArraySingleArgumentConstructorStub stub1(kind);
+ InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
__ TailCallStub(&stub1);
__ bind(&not_one_case);
- InternalArrayNArgumentsConstructorStub stubN(kind);
+ InternalArrayNArgumentsConstructorStub stubN(isolate(), kind);
__ TailCallStub(&stubN);
}
__ Push(scratch);
// isolate
__ Move(scratch,
- ExternalReference::isolate_address(masm->isolate()));
+ ExternalReference::isolate_address(isolate()));
__ Push(scratch);
// holder
__ Push(holder);
class StoreBufferOverflowStub: public PlatformCodeStub {
public:
- explicit StoreBufferOverflowStub(SaveFPRegsMode save_fp)
- : save_doubles_(save_fp) { }
+ StoreBufferOverflowStub(Isolate* isolate, SaveFPRegsMode save_fp)
+ : PlatformCodeStub(isolate), save_doubles_(save_fp) { }
void Generate(MacroAssembler* masm);
class SubStringStub: public PlatformCodeStub {
public:
- SubStringStub() {}
+ explicit SubStringStub(Isolate* isolate) : PlatformCodeStub(isolate) {}
private:
Major MajorKey() { return SubString; }
class StringCompareStub: public PlatformCodeStub {
public:
- StringCompareStub() {}
+ explicit StringCompareStub(Isolate* isolate) : PlatformCodeStub(isolate) {}
// Compares two flat ASCII strings and returns result in rax.
static void GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
public:
enum LookupMode { POSITIVE_LOOKUP, NEGATIVE_LOOKUP };
- NameDictionaryLookupStub(Register dictionary,
+ NameDictionaryLookupStub(Isolate* isolate,
+ Register dictionary,
Register result,
Register index,
LookupMode mode)
- : dictionary_(dictionary), result_(result), index_(index), mode_(mode) { }
+ : PlatformCodeStub(isolate),
+ dictionary_(dictionary),
+ result_(result),
+ index_(index),
+ mode_(mode) { }
void Generate(MacroAssembler* masm);
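The header-side rewrite is mechanical: each constructor gains a leading Isolate* and forwards it to PlatformCodeStub. Note the explicit churn that falls out of it: SubStringStub and StringCompareStub become single-argument constructors and gain explicit, while StoreBufferOverflowStub loses it because it now takes two arguments. A rewritten call site then looks like this (pattern taken from the hunks above, not a new API):

// The isolate is supplied once at construction; in this intermediate
// state of the refactoring, GetCode(isolate) still takes it separately.
SubStringStub stub(masm->isolate());
masm->TailCallStub(&stub);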
class RecordWriteStub: public PlatformCodeStub {
public:
- RecordWriteStub(Register object,
+ RecordWriteStub(Isolate* isolate,
+ Register object,
Register value,
Register address,
RememberedSetAction remembered_set_action,
SaveFPRegsMode fp_mode)
- : object_(object),
+ : PlatformCodeStub(isolate),
+ object_(object),
value_(value),
address_(address),
remembered_set_action_(remembered_set_action),
__ Set(rax, 0); // No arguments (argc == 0).
__ Move(rbx, ExternalReference::debug_break(masm->isolate()));
- CEntryStub ceb(1);
+ CEntryStub ceb(masm->isolate(), 1);
__ CallStub(&ceb);
// Restore the register values from the expression stack.
__ Push(info->scope()->GetScopeInfo());
__ CallRuntime(Runtime::kHiddenNewGlobalContext, 2);
} else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
- FastNewContextStub stub(heap_slots);
+ FastNewContextStub stub(isolate(), heap_slots);
__ CallStub(&stub);
} else {
__ Push(rdi);
} else {
type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
}
- ArgumentsAccessStub stub(type);
+ ArgumentsAccessStub stub(isolate(), type);
__ CallStub(&stub);
SetVar(arguments, rax, rbx, rdx);
!pretenure &&
scope()->is_function_scope() &&
info->num_literals() == 0) {
- FastNewClosureStub stub(info->strict_mode(), info->is_generator());
+ FastNewClosureStub stub(isolate(),
+ info->strict_mode(),
+ info->is_generator());
__ Move(rbx, info);
__ CallStub(&stub);
} else {
__ Move(rbx, Smi::FromInt(expr->literal_index()));
__ Move(rcx, constant_properties);
__ Move(rdx, Smi::FromInt(flags));
- FastCloneShallowObjectStub stub(properties_count);
+ FastCloneShallowObjectStub stub(isolate(), properties_count);
__ CallStub(&stub);
}
__ Move(rbx, Smi::FromInt(expr->literal_index()));
__ Move(rcx, constant_elements);
FastCloneShallowArrayStub stub(
+ isolate(),
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
allocation_site_mode,
length);
__ movp(rax, FieldOperand(rbx, JSFunction::kLiteralsOffset));
__ Move(rbx, Smi::FromInt(expr->literal_index()));
__ Move(rcx, constant_elements);
- FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
+ FastCloneShallowArrayStub stub(isolate(),
+ mode,
+ allocation_site_mode, length);
__ CallStub(&stub);
}
} else {
// Store the subexpression value in the array's elements.
__ Move(rcx, Smi::FromInt(i));
- StoreArrayLiteralElementStub stub;
+ StoreArrayLiteralElementStub stub(isolate());
__ CallStub(&stub);
}
CallIC(ic, TypeFeedbackId::None());
__ movp(rdi, rax);
__ movp(Operand(rsp, 2 * kPointerSize), rdi);
- CallFunctionStub stub(1, CALL_AS_METHOD);
+ CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
__ CallStub(&stub);
__ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
__ bind(&stub_call);
__ movp(rax, rcx);
- BinaryOpICStub stub(op, mode);
+ BinaryOpICStub stub(isolate(), op, mode);
CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
patch_site.EmitPatchInfo();
__ jmp(&done, Label::kNear);
Token::Value op,
OverwriteMode mode) {
__ Pop(rdx);
- BinaryOpICStub stub(op, mode);
+ BinaryOpICStub stub(isolate(), op, mode);
JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
patch_site.EmitPatchInfo();
// Record source position for debugger.
SetSourcePosition(expr->position());
- CallFunctionStub stub(arg_count, flags);
+ CallFunctionStub stub(isolate(), arg_count, flags);
__ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
__ CallStub(&stub);
// Record source position for debugger.
SetSourcePosition(expr->position());
- CallFunctionStub stub(arg_count, CALL_AS_METHOD);
+ CallFunctionStub stub(isolate(), arg_count, CALL_AS_METHOD);
__ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
__ CallStub(&stub);
__ Move(rdx, Smi::FromInt(expr->CallFeedbackSlot()));
// Record call targets in unoptimized code.
- CallFunctionStub stub(arg_count, RECORD_CALL_TARGET);
+ CallFunctionStub stub(isolate(), arg_count, RECORD_CALL_TARGET);
__ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
__ CallStub(&stub);
RecordJSReturnSite(expr);
}
// Record source position for debugger.
SetSourcePosition(expr->position());
- CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS);
+ CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
__ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
__ CallStub(&stub);
RecordJSReturnSite(expr);
__ Move(rbx, FeedbackVector());
__ Move(rdx, Smi::FromInt(expr->CallNewFeedbackSlot()));
- CallConstructStub stub(RECORD_CALL_TARGET);
+ CallConstructStub stub(isolate(), RECORD_CALL_TARGET);
__ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL);
PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
context()->Plug(rax);
VisitForAccumulatorValue(args->at(0));
__ movp(rdx, rax);
__ Move(rax, Smi::FromInt(info_->scope()->num_parameters()));
- ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
+ ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
__ CallStub(&stub);
context()->Plug(rax);
}
void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
// Load the arguments on the stack and call the stub.
- SubStringStub stub;
+ SubStringStub stub(isolate());
ZoneList<Expression*>* args = expr->arguments();
ASSERT(args->length() == 3);
VisitForStackValue(args->at(0));
void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
// Load the arguments on the stack and call the stub.
- RegExpExecStub stub;
+ RegExpExecStub stub(isolate());
ZoneList<Expression*>* args = expr->arguments();
ASSERT(args->length() == 4);
VisitForStackValue(args->at(0));
ASSERT(args->length() == 2);
VisitForStackValue(args->at(0));
VisitForStackValue(args->at(1));
- MathPowStub stub(MathPowStub::ON_STACK);
+ MathPowStub stub(isolate(), MathPowStub::ON_STACK);
__ CallStub(&stub);
context()->Plug(rax);
}
// Load the argument into rax and call the stub.
VisitForAccumulatorValue(args->at(0));
- NumberToStringStub stub;
+ NumberToStringStub stub(isolate());
__ CallStub(&stub);
context()->Plug(rax);
}
VisitForAccumulatorValue(args->at(1));
__ Pop(rdx);
- StringAddStub stub(STRING_ADD_CHECK_BOTH, NOT_TENURED);
+ StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
__ CallStub(&stub);
context()->Plug(rax);
}
VisitForStackValue(args->at(0));
VisitForStackValue(args->at(1));
- StringCompareStub stub;
+ StringCompareStub stub(isolate());
__ CallStub(&stub);
context()->Plug(rax);
}
void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
- RegExpConstructResultStub stub;
+ RegExpConstructResultStub stub(isolate());
ZoneList<Expression*>* args = expr->arguments();
ASSERT(args->length() == 3);
VisitForStackValue(args->at(0));
// Record source position of the IC call.
SetSourcePosition(expr->position());
- CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS);
+ CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
__ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
__ CallStub(&stub);
__ bind(&slow);
}
- ToNumberStub convert_stub;
+ ToNumberStub convert_stub(isolate());
__ CallStub(&convert_stub);
// Save result for postfix expressions.
__ bind(&stub_call);
__ movp(rdx, rax);
__ Move(rax, Smi::FromInt(1));
- BinaryOpICStub stub(expr->binary_op(), NO_OVERWRITE);
+ BinaryOpICStub stub(isolate(), expr->binary_op(), NO_OVERWRITE);
CallIC(stub.GetCode(isolate()), expr->CountBinOpFeedbackId());
patch_site.EmitPatchInfo();
__ bind(&done);
case Token::INSTANCEOF: {
VisitForStackValue(expr->right());
- InstanceofStub stub(InstanceofStub::kNoFlags);
+ InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
__ CallStub(&stub);
PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
__ testp(rax, rax);
Comment(";;; Allocate local context");
// Argument to NewContext is the function, which is still in rdi.
if (heap_slots <= FastNewContextStub::kMaximumSlots) {
- FastNewContextStub stub(heap_slots);
+ FastNewContextStub stub(isolate(), heap_slots);
__ CallStub(&stub);
} else {
__ Push(rdi);
ASSERT(ToRegister(instr->result()).is(rax));
switch (instr->hydrogen()->major_key()) {
case CodeStub::RegExpExec: {
- RegExpExecStub stub;
+ RegExpExecStub stub(isolate());
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
break;
}
case CodeStub::SubString: {
- SubStringStub stub;
+ SubStringStub stub(isolate());
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
break;
}
case CodeStub::StringCompare: {
- StringCompareStub stub;
+ StringCompareStub stub(isolate());
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
break;
}
ASSERT(ToRegister(instr->right()).is(rax));
ASSERT(ToRegister(instr->result()).is(rax));
- BinaryOpICStub stub(instr->op(), NO_OVERWRITE);
+ BinaryOpICStub stub(isolate(), instr->op(), NO_OVERWRITE);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
}
void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
ASSERT(ToRegister(instr->context()).is(rsi));
- InstanceofStub stub(InstanceofStub::kNoFlags);
+ InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
__ Push(ToRegister(instr->left()));
__ Push(ToRegister(instr->right()));
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
PushSafepointRegistersScope scope(this);
InstanceofStub::Flags flags = static_cast<InstanceofStub::Flags>(
InstanceofStub::kNoFlags | InstanceofStub::kCallSiteInlineCheck);
- InstanceofStub stub(flags);
+ InstanceofStub stub(isolate(), flags);
__ Push(ToRegister(instr->value()));
__ Push(instr->function());
ASSERT(ToDoubleRegister(instr->result()).is(xmm3));
if (exponent_type.IsSmi()) {
- MathPowStub stub(MathPowStub::TAGGED);
+ MathPowStub stub(isolate(), MathPowStub::TAGGED);
__ CallStub(&stub);
} else if (exponent_type.IsTagged()) {
Label no_deopt;
__ CmpObjectType(exponent, HEAP_NUMBER_TYPE, rcx);
DeoptimizeIf(not_equal, instr->environment());
__ bind(&no_deopt);
- MathPowStub stub(MathPowStub::TAGGED);
+ MathPowStub stub(isolate(), MathPowStub::TAGGED);
__ CallStub(&stub);
} else if (exponent_type.IsInteger32()) {
- MathPowStub stub(MathPowStub::INTEGER);
+ MathPowStub stub(isolate(), MathPowStub::INTEGER);
__ CallStub(&stub);
} else {
ASSERT(exponent_type.IsDouble());
- MathPowStub stub(MathPowStub::DOUBLE);
+ MathPowStub stub(isolate(), MathPowStub::DOUBLE);
__ CallStub(&stub);
}
}
ASSERT(ToRegister(instr->result()).is(rax));
int arity = instr->arity();
- CallFunctionStub stub(arity, instr->hydrogen()->function_flags());
+ CallFunctionStub stub(isolate(), arity, instr->hydrogen()->function_flags());
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
}
__ Set(rax, instr->arity());
// No cell in rbx for construct type feedback in optimized code
__ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
- CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
+ CallConstructStub stub(isolate(), NO_CALL_FUNCTION_FLAGS);
CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
}
: DONT_OVERRIDE;
if (instr->arity() == 0) {
- ArrayNoArgumentConstructorStub stub(kind, override_mode);
+ ArrayNoArgumentConstructorStub stub(isolate(), kind, override_mode);
CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
} else if (instr->arity() == 1) {
Label done;
__ j(zero, &packed_case, Label::kNear);
ElementsKind holey_kind = GetHoleyElementsKind(kind);
- ArraySingleArgumentConstructorStub stub(holey_kind, override_mode);
+ ArraySingleArgumentConstructorStub stub(isolate(),
+ holey_kind,
+ override_mode);
CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
__ jmp(&done, Label::kNear);
__ bind(&packed_case);
}
- ArraySingleArgumentConstructorStub stub(kind, override_mode);
+ ArraySingleArgumentConstructorStub stub(isolate(), kind, override_mode);
CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
__ bind(&done);
} else {
- ArrayNArgumentsConstructorStub stub(kind, override_mode);
+ ArrayNArgumentsConstructorStub stub(isolate(), kind, override_mode);
CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
}
}
PushSafepointRegistersScope scope(this);
__ Move(rbx, to_map);
bool is_js_array = from_map->instance_type() == JS_ARRAY_TYPE;
- TransitionElementsKindStub stub(from_kind, to_kind, is_js_array);
+ TransitionElementsKindStub stub(isolate(), from_kind, to_kind, is_js_array);
__ CallStub(&stub);
RecordSafepointWithLazyDeopt(instr, RECORD_SAFEPOINT_WITH_REGISTERS, 0);
}
ASSERT(ToRegister(instr->context()).is(rsi));
ASSERT(ToRegister(instr->left()).is(rdx));
ASSERT(ToRegister(instr->right()).is(rax));
- StringAddStub stub(instr->hydrogen()->flags(),
+ StringAddStub stub(isolate(),
+ instr->hydrogen()->flags(),
instr->hydrogen()->pretenure_flag());
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
}
// space for nested functions that don't need literals cloning.
bool pretenure = instr->hydrogen()->pretenure();
if (!pretenure && instr->hydrogen()->has_no_literals()) {
- FastNewClosureStub stub(instr->hydrogen()->strict_mode(),
+ FastNewClosureStub stub(isolate(),
+ instr->hydrogen()->strict_mode(),
instr->hydrogen()->is_generator());
__ Move(rbx, instr->hydrogen()->shared_info());
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
j(equal, &done, Label::kNear);
}
StoreBufferOverflowStub store_buffer_overflow =
- StoreBufferOverflowStub(save_fp);
+ StoreBufferOverflowStub(isolate(), save_fp);
CallStub(&store_buffer_overflow);
if (and_then == kReturnAtEnd) {
ret(0);
&done,
Label::kNear);
- RecordWriteStub stub(object, value, address, remembered_set_action, fp_mode);
+ RecordWriteStub stub(isolate(), object, value, address, remembered_set_action,
+ fp_mode);
CallStub(&stub);
bind(&done);
// smarter.
Set(rax, num_arguments);
LoadAddress(rbx, ExternalReference(f, isolate()));
- CEntryStub ces(f->result_size, save_doubles);
+ CEntryStub ces(isolate(), f->result_size, save_doubles);
CallStub(&ces);
}
Set(rax, num_arguments);
LoadAddress(rbx, ext);
- CEntryStub stub(1);
+ CEntryStub stub(isolate(), 1);
CallStub(&stub);
}
int result_size) {
// Set the entry point and jump to the C entry runtime stub.
LoadAddress(rbx, ext);
- CEntryStub ces(result_size);
+ CEntryStub ces(isolate(), result_size);
jmp(ces.GetCode(isolate()), RelocInfo::CODE_TARGET);
}
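Stitching the fragments above back together, the runtime-call path after this patch plausibly reads as follows (the signature is assumed; the body lines come from the hunks themselves):

// Assumed reconstruction of MacroAssembler::CallRuntime: every runtime
// call funnels through a CEntryStub that now carries its own isolate.
void MacroAssembler::CallRuntime(const Runtime::Function* f,
                                 int num_arguments,
                                 SaveFPRegsMode save_doubles) {
  Set(rax, num_arguments);
  LoadAddress(rbx, ExternalReference(f, isolate()));
  CEntryStub ces(isolate(), f->result_size, save_doubles);
  CallStub(&ces);
}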
void MacroAssembler::SlowTruncateToI(Register result_reg,
Register input_reg,
int offset) {
- DoubleToIStub stub(input_reg, result_reg, offset, true);
+ DoubleToIStub stub(isolate(), input_reg, result_reg, offset, true);
call(stub.GetCode(isolate()), RelocInfo::CODE_TARGET);
}
void MacroAssembler::DebugBreak() {
Set(rax, 0); // No arguments.
LoadAddress(rbx, ExternalReference(Runtime::kDebugBreak, isolate()));
- CEntryStub ces(1);
+ CEntryStub ces(isolate(), 1);
ASSERT(AllowThisStubCall(&ces));
Call(ces.GetCode(isolate()), RelocInfo::DEBUG_BREAK);
}
api_function_address, function_address, RelocInfo::EXTERNAL_REFERENCE);
// Jump to stub.
- CallApiFunctionStub stub(is_store, call_data_undefined, argc);
+ CallApiFunctionStub stub(isolate, is_store, call_data_undefined, argc);
__ TailCallStub(&stub);
}
Representation representation) {
if (!reg.is(receiver())) __ movp(receiver(), reg);
if (kind() == Code::LOAD_IC) {
- LoadFieldStub stub(field.is_inobject(holder),
+ LoadFieldStub stub(isolate(),
+ field.is_inobject(holder),
field.translate(holder),
representation);
GenerateTailCall(masm(), stub.GetCode(isolate()));
} else {
- KeyedLoadFieldStub stub(field.is_inobject(holder),
+ KeyedLoadFieldStub stub(isolate(),
+ field.is_inobject(holder),
field.translate(holder),
representation);
GenerateTailCall(masm(), stub.GetCode(isolate()));
Address getter_address = v8::ToCData<Address>(callback->getter());
__ Move(api_function_address, getter_address, RelocInfo::EXTERNAL_REFERENCE);
- CallApiGetterStub stub;
+ CallApiGetterStub stub(isolate());
__ TailCallStub(&stub);
}
CHECK(buffer);
HandleScope handles(isolate);
MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size));
- DoubleToIStub stub(source_reg, destination_reg, 0, true, inline_fastpath);
+ DoubleToIStub stub(isolate, source_reg, destination_reg, 0, true,
+ inline_fastpath);
byte* start = stub.GetCode(isolate)->instruction_start();
Label done;
CHECK(buffer);
HandleScope handles(isolate);
MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size));
- DoubleToIStub stub(source_reg, destination_reg, 0, true, inline_fastpath);
+ DoubleToIStub stub(isolate, source_reg, destination_reg, 0, true,
+ inline_fastpath);
byte* start = stub.GetCode(isolate)->instruction_start();
Label done;
MacroAssembler assm(isolate, buffer, static_cast<int>(actual_size));
int offset =
source_reg.is(esp) ? 0 : (HeapNumber::kValueOffset - kSmiTagSize);
- DoubleToIStub stub(source_reg, destination_reg, offset, true);
+ DoubleToIStub stub(isolate, source_reg, destination_reg, offset, true);
byte* start = stub.GetCode(isolate)->instruction_start();
__ push(ebx);
MacroAssembler assm(isolate, buffer, static_cast<int>(actual_size));
int offset =
source_reg.is(rsp) ? 0 : (HeapNumber::kValueOffset - kSmiTagSize);
- DoubleToIStub stub(source_reg, destination_reg, offset, true);
+ DoubleToIStub stub(isolate, source_reg, destination_reg, offset, true);
byte* start = stub.GetCode(isolate)->instruction_start();
__ pushq(rbx);