}
-void LoadFieldStub::InitializeInterfaceDescriptor(
+void RegExpConstructResultStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
- static Register registers[] = { edx };
- descriptor->register_param_count_ = 1;
+ static Register registers[] = { ecx, ebx, eax };
+ descriptor->register_param_count_ = 3;
descriptor->register_params_ = registers;
- descriptor->deoptimization_handler_ = NULL;
+ descriptor->deoptimization_handler_ =
+ Runtime::FunctionForId(Runtime::kRegExpConstructResult)->entry;
}
-void KeyedLoadFieldStub::InitializeInterfaceDescriptor(
+void LoadFieldStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
static Register registers[] = { edx };
}
-void KeyedArrayCallStub::InitializeInterfaceDescriptor(
+void KeyedLoadFieldStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
- static Register registers[] = { ecx };
+ static Register registers[] = { edx };
descriptor->register_param_count_ = 1;
descriptor->register_params_ = registers;
- descriptor->continuation_type_ = TAIL_CALL_CONTINUATION;
- descriptor->handler_arguments_mode_ = PASS_ARGUMENTS;
- descriptor->deoptimization_handler_ =
- FUNCTION_ADDR(KeyedCallIC_MissFromStubFailure);
+ descriptor->deoptimization_handler_ = NULL;
}
descriptor->register_params_ = registers;
descriptor->param_representations_ = representations;
}
+ {
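+    // Call descriptor for CallApiFunctionStub: callee, call_data, holder and
+    // context are passed as tagged values, while api_function_address is an
+    // untagged external pointer.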
+ CallInterfaceDescriptor* descriptor =
+ isolate->call_descriptor(Isolate::ApiFunctionCall);
+ static Register registers[] = { eax, // callee
+ ebx, // call_data
+ ecx, // holder
+ edx, // api_function_address
+ esi, // context
+ };
+ static Representation representations[] = {
+ Representation::Tagged(), // callee
+ Representation::Tagged(), // call_data
+ Representation::Tagged(), // holder
+ Representation::External(), // api_function_address
+ Representation::Tagged(), // context
+ };
+ descriptor->register_param_count_ = 5;
+ descriptor->register_params_ = registers;
+ descriptor->param_representations_ = representations;
+ }
}
}
-void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
- // Stack layout on entry:
- //
- // [esp + (1 * kPointerSize)]: function
- // [esp + (2 * kPointerSize)]: serialized scope info
-
- // Try to allocate the context in new space.
- Label gc;
- int length = slots_ + Context::MIN_CONTEXT_SLOTS;
- __ Allocate(FixedArray::SizeFor(length), eax, ebx, ecx, &gc, TAG_OBJECT);
-
- // Get the function or sentinel from the stack.
- __ mov(ecx, Operand(esp, 1 * kPointerSize));
-
- // Get the serialized scope info from the stack.
- __ mov(ebx, Operand(esp, 2 * kPointerSize));
-
- // Set up the object header.
- Factory* factory = masm->isolate()->factory();
- __ mov(FieldOperand(eax, HeapObject::kMapOffset),
- factory->block_context_map());
- __ mov(FieldOperand(eax, Context::kLengthOffset),
- Immediate(Smi::FromInt(length)));
-
- // If this block context is nested in the native context we get a smi
- // sentinel instead of a function. The block context should get the
- // canonical empty function of the native context as its closure which
- // we still have to look up.
- Label after_sentinel;
- __ JumpIfNotSmi(ecx, &after_sentinel, Label::kNear);
- if (FLAG_debug_code) {
- __ cmp(ecx, 0);
- __ Assert(equal, kExpected0AsASmiSentinel);
- }
- __ mov(ecx, GlobalObjectOperand());
- __ mov(ecx, FieldOperand(ecx, GlobalObject::kNativeContextOffset));
- __ mov(ecx, ContextOperand(ecx, Context::CLOSURE_INDEX));
- __ bind(&after_sentinel);
-
- // Set up the fixed slots.
- __ mov(ContextOperand(eax, Context::CLOSURE_INDEX), ecx);
- __ mov(ContextOperand(eax, Context::PREVIOUS_INDEX), esi);
- __ mov(ContextOperand(eax, Context::EXTENSION_INDEX), ebx);
-
- // Copy the global object from the previous context.
- __ mov(ebx, ContextOperand(esi, Context::GLOBAL_OBJECT_INDEX));
- __ mov(ContextOperand(eax, Context::GLOBAL_OBJECT_INDEX), ebx);
-
- // Initialize the rest of the slots to the hole value.
- if (slots_ == 1) {
- __ mov(ContextOperand(eax, Context::MIN_CONTEXT_SLOTS),
- factory->the_hole_value());
- } else {
- __ mov(ebx, factory->the_hole_value());
- for (int i = 0; i < slots_; i++) {
- __ mov(ContextOperand(eax, i + Context::MIN_CONTEXT_SLOTS), ebx);
- }
- }
-
- // Return and remove the on-stack parameters.
- __ mov(esi, eax);
- __ ret(2 * kPointerSize);
-
- // Need to collect. Call into runtime system.
- __ bind(&gc);
- __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1);
-}
-
-
void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
// We don't allow a GC during a store buffer overflow so there is no need to
// store the registers in any particular way, but we do have to store and
}
-void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
- const int kMaxInlineLength = 100;
- Label slowcase;
- Label done;
- __ mov(ebx, Operand(esp, kPointerSize * 3));
- __ JumpIfNotSmi(ebx, &slowcase);
- __ cmp(ebx, Immediate(Smi::FromInt(kMaxInlineLength)));
- __ j(above, &slowcase);
- // Smi-tagging is equivalent to multiplying by 2.
- STATIC_ASSERT(kSmiTag == 0);
- STATIC_ASSERT(kSmiTagSize == 1);
- // Allocate RegExpResult followed by FixedArray with size in ebx.
- // JSArray: [Map][empty properties][Elements][Length-smi][index][input]
- // Elements: [Map][Length][..elements..]
- __ Allocate(JSRegExpResult::kSize + FixedArray::kHeaderSize,
- times_pointer_size,
- ebx, // In: Number of elements as a smi
- REGISTER_VALUE_IS_SMI,
- eax, // Out: Start of allocation (tagged).
- ecx, // Out: End of allocation.
- edx, // Scratch register
- &slowcase,
- TAG_OBJECT);
- // eax: Start of allocated area, object-tagged.
-
- // Set JSArray map to global.regexp_result_map().
- // Set empty properties FixedArray.
- // Set elements to point to FixedArray allocated right after the JSArray.
- // Interleave operations for better latency.
- __ mov(edx, ContextOperand(esi, Context::GLOBAL_OBJECT_INDEX));
- Factory* factory = masm->isolate()->factory();
- __ mov(ecx, Immediate(factory->empty_fixed_array()));
- __ lea(ebx, Operand(eax, JSRegExpResult::kSize));
- __ mov(edx, FieldOperand(edx, GlobalObject::kNativeContextOffset));
- __ mov(FieldOperand(eax, JSObject::kElementsOffset), ebx);
- __ mov(FieldOperand(eax, JSObject::kPropertiesOffset), ecx);
- __ mov(edx, ContextOperand(edx, Context::REGEXP_RESULT_MAP_INDEX));
- __ mov(FieldOperand(eax, HeapObject::kMapOffset), edx);
-
- // Set input, index and length fields from arguments.
- __ mov(ecx, Operand(esp, kPointerSize * 1));
- __ mov(FieldOperand(eax, JSRegExpResult::kInputOffset), ecx);
- __ mov(ecx, Operand(esp, kPointerSize * 2));
- __ mov(FieldOperand(eax, JSRegExpResult::kIndexOffset), ecx);
- __ mov(ecx, Operand(esp, kPointerSize * 3));
- __ mov(FieldOperand(eax, JSArray::kLengthOffset), ecx);
-
- // Fill out the elements FixedArray.
- // eax: JSArray.
- // ebx: FixedArray.
- // ecx: Number of elements in array, as smi.
-
- // Set map.
- __ mov(FieldOperand(ebx, HeapObject::kMapOffset),
- Immediate(factory->fixed_array_map()));
- // Set length.
- __ mov(FieldOperand(ebx, FixedArray::kLengthOffset), ecx);
- // Fill contents of fixed-array with undefined.
- __ SmiUntag(ecx);
- __ mov(edx, Immediate(factory->undefined_value()));
- __ lea(ebx, FieldOperand(ebx, FixedArray::kHeaderSize));
- // Fill fixed array elements with undefined.
- // eax: JSArray.
- // ecx: Number of elements to fill.
- // ebx: Start of elements in FixedArray.
- // edx: undefined.
- Label loop;
- __ test(ecx, ecx);
- __ bind(&loop);
- __ j(less_equal, &done, Label::kNear); // Jump if ecx is negative or zero.
- __ sub(ecx, Immediate(1));
- __ mov(Operand(ebx, ecx, times_pointer_size, 0), edx);
- __ jmp(&loop);
-
- __ bind(&done);
- __ ret(3 * kPointerSize);
-
- __ bind(&slowcase);
- __ TailCallRuntime(Runtime::kRegExpConstructResult, 3, 1);
-}
-
-
static int NegativeComparisonResult(Condition cc) {
ASSERT(cc != equal);
ASSERT((cc == less) || (cc == less_equal)
// ebx : cache cell for call target
// edi : the function to call
Isolate* isolate = masm->isolate();
- Label slow, non_function;
+ Label slow, non_function, wrap, cont;
- // Check that the function really is a JavaScript function.
- __ JumpIfSmi(edi, &non_function);
+ if (NeedsChecks()) {
+ // Check that the function really is a JavaScript function.
+ __ JumpIfSmi(edi, &non_function);
- // Goto slow case if we do not have a function.
- __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
- __ j(not_equal, &slow);
+ // Goto slow case if we do not have a function.
+ __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
+ __ j(not_equal, &slow);
- if (RecordCallTarget()) {
- GenerateRecordCallTarget(masm);
+ if (RecordCallTarget()) {
+ GenerateRecordCallTarget(masm);
+ }
}
// Fast-case: Just invoke the function.
ParameterCount actual(argc_);
- __ InvokeFunction(edi, actual, JUMP_FUNCTION, NullCallWrapper());
+ if (CallAsMethod()) {
+ if (NeedsChecks()) {
+ // Do not transform the receiver for strict mode functions.
+ __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
+ __ test_b(FieldOperand(ecx, SharedFunctionInfo::kStrictModeByteOffset),
+ 1 << SharedFunctionInfo::kStrictModeBitWithinByte);
+ __ j(not_equal, &cont);
+
+ // Do not transform the receiver for natives (shared already in ecx).
+ __ test_b(FieldOperand(ecx, SharedFunctionInfo::kNativeByteOffset),
+ 1 << SharedFunctionInfo::kNativeBitWithinByte);
+ __ j(not_equal, &cont);
+ }
- // Slow-case: Non-function called.
- __ bind(&slow);
- if (RecordCallTarget()) {
- // If there is a call target cache, mark it megamorphic in the
- // non-function case. MegamorphicSentinel is an immortal immovable
- // object (undefined) so no write barrier is needed.
- __ mov(FieldOperand(ebx, Cell::kValueOffset),
- Immediate(TypeFeedbackCells::MegamorphicSentinel(isolate)));
+ // Load the receiver from the stack.
+ __ mov(eax, Operand(esp, (argc_ + 1) * kPointerSize));
+
+ if (NeedsChecks()) {
+ __ JumpIfSmi(eax, &wrap);
+
+ __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
+ __ j(below, &wrap);
+ } else {
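+      // No receiver checks were emitted, so wrap it unconditionally.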
+ __ jmp(&wrap);
+ }
+
+ __ bind(&cont);
}
- // Check for function proxy.
- __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE);
- __ j(not_equal, &non_function);
- __ pop(ecx);
- __ push(edi); // put proxy as additional argument under return address
- __ push(ecx);
- __ Set(eax, Immediate(argc_ + 1));
- __ Set(ebx, Immediate(0));
- __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY);
- {
+
+ __ InvokeFunction(edi, actual, JUMP_FUNCTION, NullCallWrapper());
+
+ if (NeedsChecks()) {
+ // Slow-case: Non-function called.
+ __ bind(&slow);
+ if (RecordCallTarget()) {
+ // If there is a call target cache, mark it megamorphic in the
+ // non-function case. MegamorphicSentinel is an immortal immovable
+ // object (undefined) so no write barrier is needed.
+ __ mov(FieldOperand(ebx, Cell::kValueOffset),
+ Immediate(TypeFeedbackCells::MegamorphicSentinel(isolate)));
+ }
+ // Check for function proxy.
+ __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE);
+ __ j(not_equal, &non_function);
+ __ pop(ecx);
+ __ push(edi); // put proxy as additional argument under return address
+ __ push(ecx);
+ __ Set(eax, Immediate(argc_ + 1));
+ __ Set(ebx, Immediate(0));
+ __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY);
+ {
+ Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline();
+ __ jmp(adaptor, RelocInfo::CODE_TARGET);
+ }
+
+ // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
+ // of the original receiver from the call site).
+ __ bind(&non_function);
+ __ mov(Operand(esp, (argc_ + 1) * kPointerSize), edi);
+ __ Set(eax, Immediate(argc_));
+ __ Set(ebx, Immediate(0));
+ __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline();
__ jmp(adaptor, RelocInfo::CODE_TARGET);
}
- // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
- // of the original receiver from the call site).
- __ bind(&non_function);
- __ mov(Operand(esp, (argc_ + 1) * kPointerSize), edi);
- __ Set(eax, Immediate(argc_));
- __ Set(ebx, Immediate(0));
- __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
- Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline();
- __ jmp(adaptor, RelocInfo::CODE_TARGET);
+ if (CallAsMethod()) {
+ __ bind(&wrap);
+ // Wrap the receiver and patch it back onto the stack.
+ { FrameScope frame_scope(masm, StackFrame::INTERNAL);
+ __ push(edi);
+ __ push(eax);
+ __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
+ __ pop(edi);
+ }
+ __ mov(Operand(esp, (argc_ + 1) * kPointerSize), eax);
+ __ jmp(&cont);
+ }
}
}
-void StubFailureTailCallTrampolineStub::Generate(MacroAssembler* masm) {
- CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
- __ call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
- __ mov(edi, eax);
- int parameter_count_offset =
- StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
- __ mov(eax, MemOperand(ebp, parameter_count_offset));
- // The parameter count above includes the receiver for the arguments passed to
- // the deoptimization handler. Subtract the receiver for the parameter count
- // for the call.
- __ sub(eax, Immediate(1));
- masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
- ParameterCount argument_count(eax);
- __ InvokeFunction(edi, argument_count, JUMP_FUNCTION, NullCallWrapper());
-}
-
-
void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
if (masm->isolate()->function_entry_hook() != NULL) {
ProfileEntryHookStub stub;
}
+void CallApiFunctionStub::Generate(MacroAssembler* masm) {
+ // ----------- S t a t e -------------
+ // -- eax : callee
+ // -- ebx : call_data
+ // -- ecx : holder
+ // -- edx : api_function_address
+ // -- esi : context
+ // --
+ // -- esp[0] : return address
+ // -- esp[4] : last argument
+ // -- ...
+ // -- esp[argc * 4] : first argument
+ // -- esp[(argc + 1) * 4] : receiver
+ // -----------------------------------
+
+ Register callee = eax;
+ Register call_data = ebx;
+ Register holder = ecx;
+ Register api_function_address = edx;
+ Register return_address = edi;
+ Register context = esi;
+
+ int argc = ArgumentBits::decode(bit_field_);
+ bool restore_context = RestoreContextBits::decode(bit_field_);
+ bool call_data_undefined = CallDataUndefinedBits::decode(bit_field_);
+
+ typedef FunctionCallbackArguments FCA;
+
+ STATIC_ASSERT(FCA::kContextSaveIndex == 6);
+ STATIC_ASSERT(FCA::kCalleeIndex == 5);
+ STATIC_ASSERT(FCA::kDataIndex == 4);
+ STATIC_ASSERT(FCA::kReturnValueOffset == 3);
+ STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
+ STATIC_ASSERT(FCA::kIsolateIndex == 1);
+ STATIC_ASSERT(FCA::kHolderIndex == 0);
+ STATIC_ASSERT(FCA::kArgsLength == 7);
+
+ Isolate* isolate = masm->isolate();
+
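+  // Pop the return address so the FCA slots can be pushed beneath it; it is
+  // pushed back once the slots are in place.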
+ __ pop(return_address);
+
+ // context save
+ __ push(context);
+ // load context from callee
+ __ mov(context, FieldOperand(callee, JSFunction::kContextOffset));
+
+ // callee
+ __ push(callee);
+
+ // call data
+ __ push(call_data);
+
+ Register scratch = call_data;
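+  // Both return value slots start out undefined. When call_data is already
+  // undefined, reuse the register holding it instead of pushing the
+  // immediate twice.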
+ if (!call_data_undefined) {
+ // return value
+ __ push(Immediate(isolate->factory()->undefined_value()));
+ // return value default
+ __ push(Immediate(isolate->factory()->undefined_value()));
+ } else {
+ // return value
+ __ push(scratch);
+ // return value default
+ __ push(scratch);
+ }
+ // isolate
+ __ push(Immediate(reinterpret_cast<int>(isolate)));
+ // holder
+ __ push(holder);
+
+ __ mov(scratch, esp);
+
+ // return address
+ __ push(return_address);
+
+  // The API function gets a reference to the v8::Arguments. If the CPU
+  // profiler is enabled, a wrapper function is called instead and the
+  // address of the callback must be passed as an additional parameter, so
+  // always allocate space for it.
+ const int kApiArgc = 1 + 1;
+
+ // Allocate the v8::Arguments structure in the arguments' space since
+ // it's not controlled by GC.
+ const int kApiStackSpace = 4;
+
+ __ PrepareCallApiFunction(kApiArgc + kApiStackSpace);
+
+ // FunctionCallbackInfo::implicit_args_.
+ __ mov(ApiParameterOperand(2), scratch);
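+  // Skip the implicit FCA slots and all arguments but the first, so that
+  // scratch points at argv[0], the highest-addressed argument slot.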
+ __ add(scratch, Immediate((argc + FCA::kArgsLength - 1) * kPointerSize));
+ // FunctionCallbackInfo::values_.
+ __ mov(ApiParameterOperand(3), scratch);
+ // FunctionCallbackInfo::length_.
+ __ Set(ApiParameterOperand(4), Immediate(argc));
+ // FunctionCallbackInfo::is_construct_call_.
+ __ Set(ApiParameterOperand(5), Immediate(0));
+
+ // v8::InvocationCallback's argument.
+ __ lea(scratch, ApiParameterOperand(2));
+ __ mov(ApiParameterOperand(0), scratch);
+
+ Address thunk_address = FUNCTION_ADDR(&InvokeFunctionCallback);
+
+ Operand context_restore_operand(ebp,
+ (2 + FCA::kContextSaveIndex) * kPointerSize);
+ Operand return_value_operand(ebp,
+ (2 + FCA::kReturnValueOffset) * kPointerSize);
+ __ CallApiFunctionAndReturn(api_function_address,
+ thunk_address,
+ ApiParameterOperand(1),
+ argc + FCA::kArgsLength + 1,
+ return_value_operand,
+ restore_context ?
+ &context_restore_operand : NULL);
+}
+
+
+void CallApiGetterStub::Generate(MacroAssembler* masm) {
+ // ----------- S t a t e -------------
+ // -- esp[0] : return address
+ // -- esp[4] : name
+  //  -- esp[8 .. 8 + (kArgsLength - 1) * 4] : PropertyCallbackArguments object
+ // -- ...
+ // -- edx : api_function_address
+ // -----------------------------------
+
+  // Stack space for the v8::Arguments::values_ array, the handle for the
+  // name, and a pointer to the values (treated as a smi by the GC).
+ const int kStackSpace = PropertyCallbackArguments::kArgsLength + 2;
+  // Allocate space for an optional callback address parameter in case the
+  // CPU profiler is active.
+ const int kApiArgc = 2 + 1;
+
+ Register api_function_address = edx;
+ Register scratch = ebx;
+
+ // load address of name
+ __ lea(scratch, Operand(esp, 1 * kPointerSize));
+
+ __ PrepareCallApiFunction(kApiArgc);
+ __ mov(ApiParameterOperand(0), scratch); // name.
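+  // The PropertyCallbackArguments object sits directly above the name slot,
+  // so one more pointer size gives the arguments pointer.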
+ __ add(scratch, Immediate(kPointerSize));
+ __ mov(ApiParameterOperand(1), scratch); // arguments pointer.
+
+ Address thunk_address = FUNCTION_ADDR(&InvokeAccessorGetterCallback);
+
+ __ CallApiFunctionAndReturn(api_function_address,
+ thunk_address,
+ ApiParameterOperand(2),
+ kStackSpace,
+ Operand(ebp, 7 * kPointerSize),
+ NULL);
+}
+
+
#undef __
} } // namespace v8::internal