// -- r0 : number of arguments excluding receiver
// -- r1 : called function (only guaranteed when
// extra_args requires it)
- // -- cp : context
// -- sp[0] : last argument
// -- ...
// -- sp[4 * (argc - 1)] : first argument (argc == r0)
// -- sp[4 * argc] : receiver
// -----------------------------------
+ __ AssertFunction(r1);
+
+ // Make sure we operate in the context of the called function (for example
+ // ConstructStubs implemented in C++ will be run in the context of the caller
+ // instead of the callee, due to the way that [[Construct]] is defined for
+ // ordinary functions).
+ // TODO(bmeurer): Can we make this more robust?
+ __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
// Insert extra arguments.
int num_extra_args = 0;
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
bool is_construct) {
// Called from Generate_JS_Entry
- // r0: code entry
+ // r0: new.target
// r1: function
// r2: receiver
// r3: argc
{
FrameScope scope(masm, StackFrame::INTERNAL);
- // Set up the context from the function argument.
- __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
+ // Setup the context (we need to use the caller context from the isolate).
+ ExternalReference context_address(Isolate::kContextAddress,
+ masm->isolate());
+ __ mov(cp, Operand(context_address));
+ __ ldr(cp, MemOperand(cp));
__ InitializeRootRegister();
// Push the function and the receiver onto the stack.
- __ push(r1);
- __ push(r2);
+ __ Push(r1, r2);
// Check if we have enough stack space to push all arguments.
// The function is the first thing that was pushed above after entering
// Clobbers r2.
Generate_CheckStackOverflow(masm, kFunctionOffset, r3, kArgcIsUntaggedInt);
+ // Remember new.target.
+ __ mov(r5, r0);
+
// Copy arguments to the stack in a loop.
// r1: function
// r3: argc
__ cmp(r4, r2);
__ b(ne, &loop);
+ // Setup new.target and argc.
+ __ mov(r0, Operand(r3));
+ __ mov(r3, Operand(r5));
+
// Initialize all JavaScript callee-saved registers, since they will be seen
// by the garbage collector as part of handlers.
__ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
__ mov(r9, Operand(r4));
}
- // Invoke the code and pass argc as r0.
- __ mov(r0, Operand(r3));
- if (is_construct) {
- // No type feedback cell is available
- __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
- CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
- __ CallStub(&stub);
- } else {
- __ Call(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
- }
+ // Invoke the code.
+ Handle<Code> builtin = is_construct
+ ? masm->isolate()->builtins()->Construct()
+ : masm->isolate()->builtins()->Call();
+ __ Call(builtin, RelocInfo::CODE_TARGET);
+
// Exit the JS frame and remove the parameters (except function), and
// return.
// Respect ABI stack constraint.
// static
+void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
+ // ----------- S t a t e -------------
+ // -- r0 : the number of arguments (not including the receiver)
+ // -- r1 : the constructor to call (checked to be a JSFunction)
+ // -- r3 : the original constructor (checked to be a JSFunction)
+ // -----------------------------------
+ __ AssertFunction(r1);
+ __ AssertFunction(r3);
+
+  // The calling convention for function-specific ConstructStubs requires
+  // r2 to contain either an AllocationSite or undefined.
+ __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
+
+ // Tail call to the function-specific construct stub (still in the caller
+ // context at this point).
+ __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
+ __ ldr(r4, FieldMemOperand(r4, SharedFunctionInfo::kConstructStubOffset));
+ __ add(pc, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
+}
+
+
+// static
+void Builtins::Generate_Construct(MacroAssembler* masm) {
+ // ----------- S t a t e -------------
+ // -- r0 : the number of arguments (not including the receiver)
+ // -- r1 : the constructor to call (can be any Object)
+ // -- r3 : the original constructor (either the same as the constructor or
+ // the JSFunction on which new was invoked initially)
+ // -----------------------------------
+
+ Label slow;
+ __ JumpIfSmi(r1, &slow);
+ __ CompareObjectType(r1, r5, r5, JS_FUNCTION_TYPE);
+ __ Jump(masm->isolate()->builtins()->ConstructFunction(),
+ RelocInfo::CODE_TARGET, eq);
+ __ cmp(r5, Operand(JS_FUNCTION_PROXY_TYPE));
+ __ b(ne, &slow);
+
+ // TODO(neis): This doesn't match the ES6 spec for [[Construct]] on proxies.
+ __ ldr(r1, FieldMemOperand(r1, JSFunctionProxy::kConstructTrapOffset));
+ __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
+
+ __ bind(&slow);
+ {
+ // Determine the delegate for the target (if any).
+ FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
+ __ SmiTag(r0);
+ __ Push(r0, r1);
+ __ CallRuntime(Runtime::kGetConstructorDelegate, 1);
+ __ mov(r1, r0);
+ __ Pop(r0);
+ __ SmiUntag(r0);
+ }
+ // The delegate is always a regular function.
+ __ AssertFunction(r1);
+ __ Jump(masm->isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET);
+}
+
+
+// static
void Builtins::Generate_PushArgsAndCall(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- r0 : the number of arguments (not including the receiver)
// r2 : feedback vector
// r3 : slot in feedback vector (Smi, for RecordCallTarget)
// r4 : original constructor (for IsSuperConstructorCall)
- Label slow, non_function_call;
+ Label non_function;
// Check that the function is not a smi.
- __ JumpIfSmi(r1, &non_function_call);
+ __ JumpIfSmi(r1, &non_function);
// Check that the function is a JSFunction.
__ CompareObjectType(r1, r5, r5, JS_FUNCTION_TYPE);
- __ b(ne, &slow);
+ __ b(ne, &non_function);
if (RecordCallTarget()) {
GenerateRecordCallTarget(masm, IsSuperConstructorCall());
__ mov(r3, r1);
}
- // Jump to the function-specific construct stub.
- Register jmp_reg = r4;
- __ ldr(jmp_reg, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
- __ ldr(jmp_reg, FieldMemOperand(jmp_reg,
- SharedFunctionInfo::kConstructStubOffset));
- __ add(pc, jmp_reg, Operand(Code::kHeaderSize - kHeapObjectTag));
-
- // r0: number of arguments
- // r1: called object
- // r5: object type
- __ bind(&slow);
- {
- __ cmp(r5, Operand(JS_FUNCTION_PROXY_TYPE));
- __ b(ne, &non_function_call);
- // TODO(neis): This doesn't match the ES6 spec for [[Construct]] on proxies.
- __ ldr(r1, FieldMemOperand(r1, JSFunctionProxy::kConstructTrapOffset));
- __ Jump(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
+ // Tail call to the function-specific construct stub (still in the caller
+ // context at this point).
+ __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
+ __ ldr(r4, FieldMemOperand(r4, SharedFunctionInfo::kConstructStubOffset));
+ __ add(pc, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
- __ bind(&non_function_call);
- {
- // Determine the delegate for the target (if any).
- FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
- __ SmiTag(r0);
- __ Push(r0, r1);
- __ CallRuntime(Runtime::kGetConstructorDelegate, 1);
- __ mov(r1, r0);
- __ Pop(r0);
- __ SmiUntag(r0);
- }
- // The delegate is always a regular function.
- __ AssertFunction(r1);
- __ Jump(masm->isolate()->builtins()->CallFunction(),
- RelocInfo::CODE_TARGET);
- }
+ __ bind(&non_function);
+ __ mov(r3, r1);
+ __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}
// -- x0 : number of arguments excluding receiver
// -- x1 : called function (only guaranteed when
// extra_args requires it)
- // -- cp : context
// -- sp[0] : last argument
// -- ...
// -- sp[4 * (argc - 1)] : first argument (argc == x0)
// -- sp[4 * argc] : receiver
// -----------------------------------
+ __ AssertFunction(x1);
+
+ // Make sure we operate in the context of the called function (for example
+ // ConstructStubs implemented in C++ will be run in the context of the caller
+ // instead of the callee, due to the way that [[Construct]] is defined for
+ // ordinary functions).
+ // TODO(bmeurer): Can we make this more robust?
+ __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset));
// Insert extra arguments.
int num_extra_args = 0;
// Input:
-// x0: code entry.
+// x0: new.target.
// x1: function.
// x2: receiver.
// x3: argc.
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
bool is_construct) {
// Called from JSEntryStub::GenerateBody().
+ Register new_target = x0;
Register function = x1;
Register receiver = x2;
Register argc = x3;
Register argv = x4;
+ Register scratch = x10;
ProfileEntryHookStub::MaybeCallEntryHook(masm);
// Enter an internal frame.
FrameScope scope(masm, StackFrame::INTERNAL);
- // Set up the context from the function argument.
- __ Ldr(cp, FieldMemOperand(function, JSFunction::kContextOffset));
+ // Setup the context (we need to use the caller context from the isolate).
+ __ Mov(scratch, Operand(ExternalReference(Isolate::kContextAddress,
+ masm->isolate())));
+ __ Ldr(cp, MemOperand(scratch));
__ InitializeRootRegister();
// x4: argv.
Label loop, entry;
// Compute the copy end address.
- __ Add(x10, argv, Operand(argc, LSL, kPointerSizeLog2));
+ __ Add(scratch, argv, Operand(argc, LSL, kPointerSizeLog2));
__ B(&entry);
__ Bind(&loop);
__ Ldr(x12, MemOperand(x11)); // Dereference the handle.
__ Push(x12); // Push the argument.
__ Bind(&entry);
- __ Cmp(x10, argv);
+ __ Cmp(scratch, argv);
__ B(ne, &loop);
+ __ Mov(scratch, argc);
+ __ Mov(argc, new_target);
+ __ Mov(new_target, scratch);
+ // x0: argc.
+ // x3: new.target.
+
// Initialize all JavaScript callee-saved registers, since they will be seen
// by the garbage collector as part of handlers.
// The original values have been saved in JSEntryStub::GenerateBody().
// x28 : JS stack pointer (jssp).
// x29 : frame pointer (fp).
- __ Mov(x0, argc);
- if (is_construct) {
- // No type feedback cell is available.
- __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
+ Handle<Code> builtin = is_construct
+ ? masm->isolate()->builtins()->Construct()
+ : masm->isolate()->builtins()->Call();
+ __ Call(builtin, RelocInfo::CODE_TARGET);
- CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
- __ CallStub(&stub);
- } else {
- __ Call(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
- }
// Exit the JS internal frame and remove the parameters (except function),
// and return.
}
// static
+void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
+ // ----------- S t a t e -------------
+ // -- x0 : the number of arguments (not including the receiver)
+ // -- x1 : the constructor to call (checked to be a JSFunction)
+ // -- x3 : the original constructor (checked to be a JSFunction)
+ // -----------------------------------
+ __ AssertFunction(x1);
+ __ AssertFunction(x3);
+
+  // The calling convention for function-specific ConstructStubs requires
+  // x2 to contain either an AllocationSite or undefined.
+ __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
+
+ // Tail call to the function-specific construct stub (still in the caller
+ // context at this point).
+ __ Ldr(x4, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
+ __ Ldr(x4, FieldMemOperand(x4, SharedFunctionInfo::kConstructStubOffset));
+ __ Add(x4, x4, Code::kHeaderSize - kHeapObjectTag);
+ __ Br(x4);
+}
+
+
+// static
+void Builtins::Generate_Construct(MacroAssembler* masm) {
+ // ----------- S t a t e -------------
+ // -- x0 : the number of arguments (not including the receiver)
+ // -- x1 : the constructor to call (can be any Object)
+ // -- x3 : the original constructor (either the same as the constructor or
+ // the JSFunction on which new was invoked initially)
+ // -----------------------------------
+
+ Label slow;
+ __ JumpIfSmi(x1, &slow);
+ __ CompareObjectType(x1, x5, x5, JS_FUNCTION_TYPE);
+ __ Jump(masm->isolate()->builtins()->ConstructFunction(),
+ RelocInfo::CODE_TARGET, eq);
+ __ Cmp(x5, Operand(JS_FUNCTION_PROXY_TYPE));
+ __ B(ne, &slow);
+
+ // TODO(neis): This doesn't match the ES6 spec for [[Construct]] on proxies.
+ __ Ldr(x1, FieldMemOperand(x1, JSFunctionProxy::kConstructTrapOffset));
+ __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
+
+ __ Bind(&slow);
+ {
+ // Determine the delegate for the target (if any).
+ FrameScope scope(masm, StackFrame::INTERNAL);
+ __ SmiTag(x0);
+ __ Push(x0, x1);
+ __ CallRuntime(Runtime::kGetConstructorDelegate, 1);
+ __ Mov(x1, x0);
+ __ Pop(x0);
+ __ SmiUntag(x0);
+ }
+ // The delegate is always a regular function.
+ __ AssertFunction(x1);
+ __ Jump(masm->isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET);
+}
+
+
+// static
void Builtins::Generate_PushArgsAndCall(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- x0 : the number of arguments (not including the receiver)
// x3 : slot in feedback vector (Smi, for RecordCallTarget)
// x4 : original constructor (for IsSuperConstructorCall)
Register function = x1;
- Label slow, non_function_call;
+ Label non_function;
// Check that the function is not a smi.
- __ JumpIfSmi(function, &non_function_call);
+ __ JumpIfSmi(function, &non_function);
// Check that the function is a JSFunction.
Register object_type = x10;
__ JumpIfNotObjectType(function, object_type, object_type, JS_FUNCTION_TYPE,
- &slow);
+ &non_function);
if (RecordCallTarget()) {
GenerateRecordCallTarget(masm, x0, function, x2, x3, x4, x5, x11, x12,
__ Mov(x3, function);
}
- // Jump to the function-specific construct stub.
- Register jump_reg = x4;
- Register shared_func_info = jump_reg;
- Register cons_stub = jump_reg;
- Register cons_stub_code = jump_reg;
- __ Ldr(shared_func_info,
- FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset));
- __ Ldr(cons_stub,
- FieldMemOperand(shared_func_info,
- SharedFunctionInfo::kConstructStubOffset));
- __ Add(cons_stub_code, cons_stub, Code::kHeaderSize - kHeapObjectTag);
- __ Br(cons_stub_code);
-
- __ Bind(&slow);
- {
- __ Cmp(object_type, JS_FUNCTION_PROXY_TYPE);
- __ B(ne, &non_function_call);
- // TODO(neis): This doesn't match the ES6 spec for [[Construct]] on proxies.
- __ Ldr(x1, FieldMemOperand(x1, JSFunctionProxy::kConstructTrapOffset));
- __ Jump(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
+ // Tail call to the function-specific construct stub (still in the caller
+ // context at this point).
+ __ Ldr(x4, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
+ __ Ldr(x4, FieldMemOperand(x4, SharedFunctionInfo::kConstructStubOffset));
+ __ Add(x4, x4, Code::kHeaderSize - kHeapObjectTag);
+ __ Br(x4);
- __ Bind(&non_function_call);
- {
- // Determine the delegate for the target (if any).
- FrameScope scope(masm, StackFrame::INTERNAL);
- __ SmiTag(x0);
- __ Push(x0, x1);
- __ CallRuntime(Runtime::kGetConstructorDelegate, 1);
- __ Mov(x1, x0);
- __ Pop(x0);
- __ SmiUntag(x0);
- }
- // The delegate is always a regular function.
- __ AssertFunction(x1);
- __ Jump(masm->isolate()->builtins()->CallFunction(),
- RelocInfo::CODE_TARGET);
- }
+ __ Bind(&non_function);
+ __ Mov(x3, function);
+ __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}
}
-void MacroAssembler::Jump(intptr_t target, RelocInfo::Mode rmode) {
+void MacroAssembler::Jump(intptr_t target, RelocInfo::Mode rmode,
+ Condition cond) {
+ if (cond == nv) return;
UseScratchRegisterScope temps(this);
Register temp = temps.AcquireX();
+ Label done;
+ if (cond != al) B(NegateCondition(cond), &done);
Mov(temp, Operand(target, rmode));
Br(temp);
+ Bind(&done);
}
-void MacroAssembler::Jump(Address target, RelocInfo::Mode rmode) {
+void MacroAssembler::Jump(Address target, RelocInfo::Mode rmode,
+ Condition cond) {
DCHECK(!RelocInfo::IsCodeTarget(rmode));
- Jump(reinterpret_cast<intptr_t>(target), rmode);
+ Jump(reinterpret_cast<intptr_t>(target), rmode, cond);
}
-void MacroAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode) {
+void MacroAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode,
+ Condition cond) {
DCHECK(RelocInfo::IsCodeTarget(rmode));
AllowDeferredHandleDereference embedding_raw_address;
- Jump(reinterpret_cast<intptr_t>(code.location()), rmode);
+ Jump(reinterpret_cast<intptr_t>(code.location()), rmode, cond);
}
void GetBuiltinFunction(Register target, int native_context_index);
void Jump(Register target);
- void Jump(Address target, RelocInfo::Mode rmode);
- void Jump(Handle<Code> code, RelocInfo::Mode rmode);
- void Jump(intptr_t target, RelocInfo::Mode rmode);
+ void Jump(Address target, RelocInfo::Mode rmode, Condition cond = al);
+ void Jump(Handle<Code> code, RelocInfo::Mode rmode, Condition cond = al);
+ void Jump(intptr_t target, RelocInfo::Mode rmode, Condition cond = al);
void Call(Register target);
void Call(Label* target);
int64_t Simulator::CallJS(byte* entry,
- byte* function_entry,
+ Object* new_target,
JSFunction* func,
Object* revc,
int64_t argc,
Object*** argv) {
CallArgument args[] = {
- CallArgument(function_entry),
+ CallArgument(new_target),
CallArgument(func),
CallArgument(revc),
CallArgument(argc),
// generated RegExp code with 10 parameters. These are convenience functions,
// which set up the simulator state and grab the result on return.
int64_t CallJS(byte* entry,
- byte* function_entry,
+ Object* new_target,
JSFunction* func,
Object* revc,
int64_t argc,
V(CallFunction, BUILTIN, UNINITIALIZED, kNoExtraICState) \
V(Call, BUILTIN, UNINITIALIZED, kNoExtraICState) \
\
+ V(ConstructFunction, BUILTIN, UNINITIALIZED, kNoExtraICState) \
+ V(Construct, BUILTIN, UNINITIALIZED, kNoExtraICState) \
+ \
V(PushArgsAndCall, BUILTIN, UNINITIALIZED, kNoExtraICState) \
\
V(InOptimizationQueue, BUILTIN, UNINITIALIZED, kNoExtraICState) \
// ES6 section 7.3.12 Call(F, V, [argumentsList])
static void Generate_Call(MacroAssembler* masm);
+  // ES6 section 9.2.2 [[Construct]] (argumentsList, newTarget)
+ static void Generate_ConstructFunction(MacroAssembler* masm);
+ // ES6 section 7.3.13 Construct (F, [argumentsList], [newTarget])
+ static void Generate_Construct(MacroAssembler* masm);
+
static void Generate_PushArgsAndCall(MacroAssembler* masm);
static void Generate_FunctionCall(MacroAssembler* masm);
MUST_USE_RESULT MaybeHandle<Object> Invoke(bool is_construct,
Handle<JSFunction> function,
Handle<Object> receiver, int argc,
- Handle<Object> args[]) {
+ Handle<Object> args[],
+ Handle<Object> new_target) {
Isolate* const isolate = function->GetIsolate();
// Convert calls on global objects to be calls on the global
// Placeholder for return value.
Object* value = NULL;
- typedef Object* (*JSEntryFunction)(byte* entry,
- Object* function,
- Object* receiver,
- int argc,
+ typedef Object* (*JSEntryFunction)(Object* new_target, Object* function,
+ Object* receiver, int argc,
Object*** args);
Handle<Code> code = is_construct
JSEntryFunction stub_entry = FUNCTION_CAST<JSEntryFunction>(code->entry());
// Call the function through the right JS entry stub.
- byte* ignored = nullptr; // TODO(bmeurer): Remove this altogether.
+ Object* orig_func = *new_target;
JSFunction* func = *function;
Object* recv = *receiver;
Object*** argv = reinterpret_cast<Object***>(args);
if (FLAG_profile_deserialization) PrintDeserializedCodeInfo(function);
- value = CALL_GENERATED_CODE(stub_entry, ignored, func, recv, argc, argv);
+ value = CALL_GENERATED_CODE(stub_entry, orig_func, func, recv, argc, argv);
}
#ifdef VERIFY_HEAP
GetFunctionDelegate(isolate, callable), Object);
}
Handle<JSFunction> func = Handle<JSFunction>::cast(callable);
+ return Invoke(false, func, receiver, argc, argv,
+ isolate->factory()->undefined_value());
+}
+
- return Invoke(false, func, receiver, argc, argv);
+MaybeHandle<Object> Execution::New(Handle<JSFunction> constructor, int argc,
+ Handle<Object> argv[]) {
+ return New(constructor, constructor, argc, argv);
}
-MaybeHandle<Object> Execution::New(Handle<JSFunction> func,
- int argc,
+MaybeHandle<Object> Execution::New(Handle<JSFunction> constructor,
+ Handle<JSFunction> new_target, int argc,
Handle<Object> argv[]) {
- return Invoke(true, func, handle(func->global_proxy()), argc, argv);
+ return Invoke(true, constructor, handle(constructor->global_proxy()), argc,
+ argv, new_target);
}
Handle<Object> argv[]);
// Construct object from function, the caller supplies an array of
- // arguments. Arguments are Object* type. After function returns,
- // pointers in 'args' might be invalid.
- //
- // *pending_exception tells whether the invoke resulted in
- // a pending exception.
- //
- MUST_USE_RESULT static MaybeHandle<Object> New(Handle<JSFunction> func,
+ // arguments.
+ MUST_USE_RESULT static MaybeHandle<Object> New(Handle<JSFunction> constructor,
+ int argc,
+ Handle<Object> argv[]);
+ MUST_USE_RESULT static MaybeHandle<Object> New(Handle<JSFunction> constructor,
+ Handle<JSFunction> new_target,
int argc,
Handle<Object> argv[]);
VisitForStackValue(args->at(0));
VisitForStackValue(args->at(1));
- // Load original constructor into r4.
- __ ldr(r4, MemOperand(sp, 1 * kPointerSize));
+ // Load original constructor into r3.
+ __ ldr(r3, MemOperand(sp, 1 * kPointerSize));
// Check if the calling frame is an arguments adaptor frame.
Label adaptor_frame, args_set_up, runtime;
__ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
- __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
- __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
+ __ ldr(r4, MemOperand(r2, StandardFrameConstants::kContextOffset));
+ __ cmp(r4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
__ b(eq, &adaptor_frame);
// default constructor has no arguments, so no adaptor frame means no args.
__ mov(r0, Operand::Zero());
__ bind(&loop);
// Pre-decrement r2 with kPointerSize on each iteration.
// Pre-decrement in order to skip receiver.
- __ ldr(r3, MemOperand(r2, kPointerSize, NegPreIndex));
- __ Push(r3);
+ __ ldr(r4, MemOperand(r2, kPointerSize, NegPreIndex));
+ __ Push(r4);
__ sub(r1, r1, Operand(1));
__ cmp(r1, Operand::Zero());
__ b(ne, &loop);
__ bind(&args_set_up);
__ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
- __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
-
- CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL);
- __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
+ __ Call(isolate()->builtins()->Construct(), RelocInfo::CONSTRUCT_CALL);
// Restore context register.
__ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
VisitForStackValue(args->at(0));
VisitForStackValue(args->at(1));
- // Load original constructor into x4.
- __ Peek(x4, 1 * kPointerSize);
+ // Load original constructor into x3.
+ __ Peek(x3, 1 * kPointerSize);
// Check if the calling frame is an arguments adaptor frame.
Label adaptor_frame, args_set_up, runtime;
__ bind(&args_set_up);
__ Peek(x1, Operand(x0, LSL, kPointerSizeLog2));
- __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
-
- CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL);
- __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
+ __ Call(isolate()->builtins()->Construct(), RelocInfo::CONSTRUCT_CALL);
// Restore context register.
__ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
VisitForStackValue(args->at(0));
VisitForStackValue(args->at(1));
- // Load original constructor into ecx.
- __ mov(ecx, Operand(esp, 1 * kPointerSize));
-
// Check if the calling frame is an arguments adaptor frame.
Label adaptor_frame, args_set_up, runtime;
__ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
__ bind(&args_set_up);
- __ mov(edi, Operand(esp, eax, times_pointer_size, 0));
- __ mov(ebx, Immediate(isolate()->factory()->undefined_value()));
- CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL);
- __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
+ __ mov(edx, Operand(esp, eax, times_pointer_size, 1 * kPointerSize));
+ __ mov(edi, Operand(esp, eax, times_pointer_size, 0 * kPointerSize));
+ __ Call(isolate()->builtins()->Construct(), RelocInfo::CONSTRUCT_CALL);
// Restore context register.
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
VisitForStackValue(args->at(0));
VisitForStackValue(args->at(1));
- // Load original constructor into t0.
- __ lw(t0, MemOperand(sp, 1 * kPointerSize));
+ // Load original constructor into a3.
+ __ lw(a3, MemOperand(sp, 1 * kPointerSize));
// Check if the calling frame is an arguments adaptor frame.
Label adaptor_frame, args_set_up, runtime;
__ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
- __ lw(a3, MemOperand(a2, StandardFrameConstants::kContextOffset));
- __ Branch(&adaptor_frame, eq, a3,
+ __ lw(t0, MemOperand(a2, StandardFrameConstants::kContextOffset));
+ __ Branch(&adaptor_frame, eq, t0,
Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
// default constructor has no arguments, so no adaptor frame means no args.
__ mov(a0, zero_reg);
// Pre-decrement a2 with kPointerSize on each iteration.
// Pre-decrement in order to skip receiver.
__ Addu(a2, a2, Operand(-kPointerSize));
- __ lw(a3, MemOperand(a2));
- __ Push(a3);
+ __ lw(t0, MemOperand(a2));
+ __ Push(t0);
__ Addu(a1, a1, Operand(-1));
__ Branch(&loop, ne, a1, Operand(zero_reg));
}
__ sll(at, a0, kPointerSizeLog2);
__ Addu(at, at, Operand(sp));
__ lw(a1, MemOperand(at, 0));
- __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
-
- CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL);
- __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
+ __ Call(isolate()->builtins()->Construct(), RelocInfo::CONSTRUCT_CALL);
// Restore context register.
__ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
VisitForStackValue(args->at(0));
VisitForStackValue(args->at(1));
- // Load original constructor into a4.
- __ ld(a4, MemOperand(sp, 1 * kPointerSize));
+ // Load original constructor into a3.
+ __ ld(a3, MemOperand(sp, 1 * kPointerSize));
// Check if the calling frame is an arguments adaptor frame.
Label adaptor_frame, args_set_up, runtime;
__ ld(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
- __ ld(a3, MemOperand(a2, StandardFrameConstants::kContextOffset));
- __ Branch(&adaptor_frame, eq, a3,
+ __ ld(a4, MemOperand(a2, StandardFrameConstants::kContextOffset));
+ __ Branch(&adaptor_frame, eq, a4,
Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
// default constructor has no arguments, so no adaptor frame means no args.
__ mov(a0, zero_reg);
// Pre-decrement a2 with kPointerSize on each iteration.
// Pre-decrement in order to skip receiver.
__ Daddu(a2, a2, Operand(-kPointerSize));
- __ ld(a3, MemOperand(a2));
- __ Push(a3);
+ __ ld(a4, MemOperand(a2));
+ __ Push(a4);
__ Daddu(a1, a1, Operand(-1));
__ Branch(&loop, ne, a1, Operand(zero_reg));
}
__ dsll(at, a0, kPointerSizeLog2);
__ Daddu(at, at, Operand(sp));
__ ld(a1, MemOperand(at, 0));
- __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
-
- CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL);
- __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
+ __ Call(isolate()->builtins()->Construct(), RelocInfo::CONSTRUCT_CALL);
// Restore context register.
__ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
VisitForStackValue(args->at(0));
VisitForStackValue(args->at(1));
- // Load original constructor into rcx.
- __ movp(rcx, Operand(rsp, 1 * kPointerSize));
-
// Check if the calling frame is an arguments adaptor frame.
Label adaptor_frame, args_set_up, runtime;
__ movp(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
}
__ bind(&args_set_up);
- __ movp(rdi, Operand(rsp, rax, times_pointer_size, 0));
- __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
-
- CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL);
- __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
+ __ movp(rdx, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize));
+ __ movp(rdi, Operand(rsp, rax, times_pointer_size, 0 * kPointerSize));
+ __ Call(isolate()->builtins()->Construct(), RelocInfo::CONSTRUCT_CALL);
// Restore context register.
__ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
NO_CALL_CONSTRUCTOR_FLAGS = 0,
// The call target is cached in the instruction stream.
RECORD_CONSTRUCTOR_TARGET = 1,
+ // TODO(bmeurer): Kill these SUPER_* modes and use the Construct builtin
+ // directly instead; also there's no point in collecting any "targets" for
+ // super constructor calls, since these are known when we optimize the
+ // constructor that contains the super call.
SUPER_CONSTRUCTOR_CALL = 1 << 1,
SUPER_CALL_RECORD_TARGET = SUPER_CONSTRUCTOR_CALL | RECORD_CONSTRUCTOR_TARGET
};
// -- eax : number of arguments excluding receiver
// -- edi : called function (only guaranteed when
// extra_args requires it)
- // -- esi : context
// -- esp[0] : return address
// -- esp[4] : last argument
// -- ...
// -- esp[4 * argc] : first argument (argc == eax)
// -- esp[4 * (argc +1)] : receiver
// -----------------------------------
+ __ AssertFunction(edi);
+
+ // Make sure we operate in the context of the called function (for example
+ // ConstructStubs implemented in C++ will be run in the context of the caller
+ // instead of the callee, due to the way that [[Construct]] is defined for
+ // ordinary functions).
+ // TODO(bmeurer): Can we make this more robust?
+ __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
// Insert extra arguments.
int num_extra_args = 0;
{
FrameScope scope(masm, StackFrame::INTERNAL);
+ // Setup the context (we need to use the caller context from the isolate).
+ ExternalReference context_address(Isolate::kContextAddress,
+ masm->isolate());
+ __ mov(esi, Operand::StaticVariable(context_address));
+
// Load the previous frame pointer (ebx) to access C arguments
__ mov(ebx, Operand(ebp, 0));
- // Get the function from the frame and setup the context.
- __ mov(ecx, Operand(ebx, EntryFrameConstants::kFunctionArgOffset));
- __ mov(esi, FieldOperand(ecx, JSFunction::kContextOffset));
-
// Push the function and the receiver onto the stack.
- __ push(ecx);
+ __ push(Operand(ebx, EntryFrameConstants::kFunctionArgOffset));
__ push(Operand(ebx, EntryFrameConstants::kReceiverArgOffset));
// Load the number of arguments and setup pointer to the arguments.
// Copy arguments to the stack in a loop.
Label loop, entry;
__ Move(ecx, Immediate(0));
- __ jmp(&entry);
+ __ jmp(&entry, Label::kNear);
__ bind(&loop);
__ mov(edx, Operand(ebx, ecx, times_4, 0)); // push parameter from argv
__ push(Operand(edx, 0)); // dereference handle
__ cmp(ecx, eax);
__ j(not_equal, &loop);
- // Get the function from the stack and call it.
- // kPointerSize for the receiver.
- __ mov(edi, Operand(esp, eax, times_4, kPointerSize));
+ // Load the previous frame pointer (ebx) to access C arguments
+ __ mov(ebx, Operand(ebp, 0));
+
+ // Get the new.target and function from the frame.
+ __ mov(edx, Operand(ebx, EntryFrameConstants::kNewTargetArgOffset));
+ __ mov(edi, Operand(ebx, EntryFrameConstants::kFunctionArgOffset));
// Invoke the code.
- if (is_construct) {
- // No type feedback cell is available
- __ mov(ebx, masm->isolate()->factory()->undefined_value());
- CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
- __ CallStub(&stub);
- } else {
- __ Call(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
- }
+ Handle<Code> builtin = is_construct
+ ? masm->isolate()->builtins()->Construct()
+ : masm->isolate()->builtins()->Call();
+ __ Call(builtin, RelocInfo::CODE_TARGET);
// Exit the internal frame. Notice that this also removes the empty.
// context and the function left on the stack by the code
// static
+void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
+ // ----------- S t a t e -------------
+ // -- eax : the number of arguments (not including the receiver)
+ // -- edx : the original constructor (checked to be a JSFunction)
+ // -- edi : the constructor to call (checked to be a JSFunction)
+ // -----------------------------------
+ __ AssertFunction(edx);
+ __ AssertFunction(edi);
+
+ // The calling convention for function-specific ConstructStubs requires
+ // ebx to contain either an AllocationSite or undefined.
+ __ LoadRoot(ebx, Heap::kUndefinedValueRootIndex);
+
+ // Tail call to the function-specific construct stub (still in the caller
+ // context at this point).
+ __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
+ __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kConstructStubOffset));
+ __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize));
+ __ jmp(ecx);
+}
+
+
+// static
+void Builtins::Generate_Construct(MacroAssembler* masm) {
+ // ----------- S t a t e -------------
+ // -- eax : the number of arguments (not including the receiver)
+ // -- edx : the original constructor (either the same as the constructor or
+ // the JSFunction on which new was invoked initially)
+ // -- edi : the constructor to call (can be any Object)
+ // -----------------------------------
+
+ Label slow;
+ __ JumpIfSmi(edi, &slow, Label::kNear);
+ __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
+ __ j(equal, masm->isolate()->builtins()->ConstructFunction(),
+ RelocInfo::CODE_TARGET);
+ __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE);
+ __ j(not_equal, &slow, Label::kNear);
+
+ // TODO(neis): This doesn't match the ES6 spec for [[Construct]] on proxies.
+ __ mov(edi, FieldOperand(edi, JSFunctionProxy::kConstructTrapOffset));
+ __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
+
+ __ bind(&slow);
+ {
+ // Determine the delegate for the target (if any).
+ FrameScope scope(masm, StackFrame::INTERNAL);
+ __ SmiTag(eax);
+ __ Push(eax);
+ __ Push(edi);
+ __ CallRuntime(Runtime::kGetConstructorDelegate, 1);
+ __ mov(edi, eax);
+ __ Pop(eax);
+ __ SmiUntag(eax);
+ }
+ // The delegate is always a regular function.
+ __ AssertFunction(edi);
+ __ Jump(masm->isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET);
+}
+
+
+// static
void Builtins::Generate_PushArgsAndCall(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- eax : the number of arguments (not including the receiver)
// ecx : original constructor (for IsSuperConstructorCall)
// edx : slot in feedback vector (Smi, for RecordCallTarget)
// edi : constructor function
- Label slow, non_function_call;
if (IsSuperConstructorCall()) {
__ push(ecx);
}
+ Label non_function;
// Check that function is not a smi.
- __ JumpIfSmi(edi, &non_function_call);
+ __ JumpIfSmi(edi, &non_function);
// Check that function is a JSFunction.
__ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
- __ j(not_equal, &slow);
+ __ j(not_equal, &non_function);
if (RecordCallTarget()) {
GenerateRecordCallTarget(masm, IsSuperConstructorCall());
__ mov(edx, edi);
}
- // Jump to the function-specific construct stub.
- Register jmp_reg = ecx;
- __ mov(jmp_reg, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
- __ mov(jmp_reg, FieldOperand(jmp_reg,
- SharedFunctionInfo::kConstructStubOffset));
- __ lea(jmp_reg, FieldOperand(jmp_reg, Code::kHeaderSize));
- __ jmp(jmp_reg);
-
- // edi: called object
- // eax: number of arguments
- // ecx: object map
- // esp[0]: original receiver (for IsSuperConstructorCall)
- __ bind(&slow);
- {
- __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE);
- __ j(not_equal, &non_function_call, Label::kNear);
- if (IsSuperConstructorCall()) __ Drop(1);
- // TODO(neis): This doesn't match the ES6 spec for [[Construct]] on proxies.
- __ mov(edi, FieldOperand(edi, JSFunctionProxy::kConstructTrapOffset));
- __ Jump(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
-
- __ bind(&non_function_call);
- if (IsSuperConstructorCall()) __ Drop(1);
- {
- // Determine the delegate for the target (if any).
- FrameScope scope(masm, StackFrame::INTERNAL);
- __ SmiTag(eax);
- __ Push(eax);
- __ Push(edi);
- __ CallRuntime(Runtime::kGetConstructorDelegate, 1);
- __ mov(edi, eax);
- __ Pop(eax);
- __ SmiUntag(eax);
- }
- // The delegate is always a regular function.
- __ AssertFunction(edi);
- __ Jump(isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET);
- }
+ // Tail call to the function-specific construct stub (still in the caller
+ // context at this point).
+ __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
+ __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kConstructStubOffset));
+ __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize));
+ __ jmp(ecx);
+
+ __ bind(&non_function);
+ if (IsSuperConstructorCall()) __ Drop(1);
+ __ mov(edx, edi);
+ __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}
public:
static const int kCallerFPOffset = -6 * kPointerSize;
+ static const int kNewTargetArgOffset = +2 * kPointerSize;
static const int kFunctionArgOffset = +3 * kPointerSize;
static const int kReceiverArgOffset = +4 * kPointerSize;
static const int kArgcOffset = +5 * kPointerSize;
// -- a0 : number of arguments excluding receiver
// -- a1 : called function (only guaranteed when
// -- extra_args requires it)
- // -- cp : context
// -- sp[0] : last argument
// -- ...
// -- sp[4 * (argc - 1)] : first argument
// -- sp[4 * agrc] : receiver
// -----------------------------------
+ __ AssertFunction(a1);
+
+ // Make sure we operate in the context of the called function (for example
+ // ConstructStubs implemented in C++ will be run in the context of the caller
+ // instead of the callee, due to the way that [[Construct]] is defined for
+ // ordinary functions).
+ // TODO(bmeurer): Can we make this more robust?
+ __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
// Insert extra arguments.
int num_extra_args = 0;
// Called from JSEntryStub::GenerateBody
// ----------- S t a t e -------------
- // -- a0: code entry
+ // -- a0: new.target
// -- a1: function
// -- a2: receiver_pointer
// -- a3: argc
{
FrameScope scope(masm, StackFrame::INTERNAL);
- // Set up the context from the function argument.
- __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
+ // Set up the context (we need to use the caller context from the isolate).
+ ExternalReference context_address(Isolate::kContextAddress,
+ masm->isolate());
+ __ li(cp, Operand(context_address));
+ __ lw(cp, MemOperand(cp));
// Push the function and the receiver onto the stack.
__ Push(a1, a2);
// Clobbers a2.
Generate_CheckStackOverflow(masm, kFunctionOffset, a3, kArgcIsUntaggedInt);
+ // Remember new.target.
+ __ mov(t1, a0);
+
// Copy arguments to the stack in a loop.
// a3: argc
// s0: argv, i.e. points to first arg
__ bind(&entry);
__ Branch(&loop, ne, s0, Operand(t2));
+ // Set up new.target and argc.
+ __ mov(a0, a3);
+ __ mov(a3, t1);
+
// Initialize all JavaScript callee-saved registers, since they will be seen
// by the garbage collector as part of handlers.
__ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
// s6 holds the root address. Do not clobber.
// s7 is cp. Do not init.
- // Invoke the code and pass argc as a0.
- __ mov(a0, a3);
- if (is_construct) {
- // No type feedback cell is available
- __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
- CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
- __ CallStub(&stub);
- } else {
- __ Call(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
- }
+ // Invoke the code.
+ Handle<Code> builtin = is_construct
+ ? masm->isolate()->builtins()->Construct()
+ : masm->isolate()->builtins()->Call();
+ __ Call(builtin, RelocInfo::CODE_TARGET);
// Leave internal frame.
}
// static
+void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
+ // ----------- S t a t e -------------
+ // -- a0 : the number of arguments (not including the receiver)
+ // -- a1 : the constructor to call (checked to be a JSFunction)
+ // -- a3 : the original constructor (checked to be a JSFunction)
+ // -----------------------------------
+ __ AssertFunction(a1);
+ __ AssertFunction(a3);
+
+ // The calling convention for function-specific ConstructStubs requires
+ // a2 to contain either an AllocationSite or undefined.
+ __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
+
+ // Tail call to the function-specific construct stub (still in the caller
+ // context at this point).
+ __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
+ __ lw(t0, FieldMemOperand(t0, SharedFunctionInfo::kConstructStubOffset));
+ __ Addu(at, t0, Operand(Code::kHeaderSize - kHeapObjectTag));
+ __ Jump(at);
+}
+
+
+// static
+void Builtins::Generate_Construct(MacroAssembler* masm) {
+ // ----------- S t a t e -------------
+ // -- a0 : the number of arguments (not including the receiver)
+ // -- a1 : the constructor to call (can be any Object)
+ // -- a3 : the original constructor (either the same as the constructor or
+ // the JSFunction on which new was invoked initially)
+ // -----------------------------------
+
+ Label slow;
+ __ JumpIfSmi(a1, &slow);
+ __ GetObjectType(a1, t1, t1);
+ __ Jump(masm->isolate()->builtins()->ConstructFunction(),
+ RelocInfo::CODE_TARGET, eq, t1, Operand(JS_FUNCTION_TYPE));
+ __ Branch(&slow, ne, t1, Operand(JS_FUNCTION_PROXY_TYPE));
+
+ // TODO(neis): This doesn't match the ES6 spec for [[Construct]] on proxies.
+ __ lw(a1, FieldMemOperand(a1, JSFunctionProxy::kConstructTrapOffset));
+ __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
+
+ __ bind(&slow);
+ {
+ // Determine the delegate for the target (if any).
+ FrameScope scope(masm, StackFrame::INTERNAL);
+ __ SmiTag(a0);
+ __ Push(a0, a1);
+ __ CallRuntime(Runtime::kGetConstructorDelegate, 1);
+ __ mov(a1, v0);
+ __ Pop(a0);
+ __ SmiUntag(a0);
+ }
+ // The delegate is always a regular function.
+ __ AssertFunction(a1);
+ __ Jump(masm->isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET);
+}
+
+
+// static
void Builtins::Generate_PushArgsAndCall(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- a0 : the number of arguments (not including the receiver)
// a2 : feedback vector
// a3 : slot in feedback vector (Smi, for RecordCallTarget)
// t0 : original constructor (for IsSuperConstructorCall)
- Label slow, non_function_call;
+ Label non_function;
// Check that the function is not a smi.
- __ JumpIfSmi(a1, &non_function_call);
+ __ JumpIfSmi(a1, &non_function);
// Check that the function is a JSFunction.
__ GetObjectType(a1, t1, t1);
- __ Branch(&slow, ne, t1, Operand(JS_FUNCTION_TYPE));
+ __ Branch(&non_function, ne, t1, Operand(JS_FUNCTION_TYPE));
if (RecordCallTarget()) {
GenerateRecordCallTarget(masm, IsSuperConstructorCall());
__ mov(a3, a1);
}
- // Jump to the function-specific construct stub.
- Register jmp_reg = t0;
- __ lw(jmp_reg, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
- __ lw(jmp_reg, FieldMemOperand(jmp_reg,
- SharedFunctionInfo::kConstructStubOffset));
- __ Addu(at, jmp_reg, Operand(Code::kHeaderSize - kHeapObjectTag));
+ // Tail call to the function-specific construct stub (still in the caller
+ // context at this point).
+ __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
+ __ lw(t0, FieldMemOperand(t0, SharedFunctionInfo::kConstructStubOffset));
+ __ Addu(at, t0, Operand(Code::kHeaderSize - kHeapObjectTag));
__ Jump(at);
- // a0: number of arguments
- // a1: called object
- // t1: object type
- __ bind(&slow);
- {
- __ Branch(&non_function_call, ne, t1, Operand(JS_FUNCTION_PROXY_TYPE));
- // TODO(neis): This doesn't match the ES6 spec for [[Construct]] on proxies.
- __ lw(a1, FieldMemOperand(a1, JSFunctionProxy::kConstructTrapOffset));
- __ Jump(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
-
- __ bind(&non_function_call);
- {
- // Determine the delegate for the target (if any).
- FrameScope scope(masm, StackFrame::INTERNAL);
- __ SmiTag(a0);
- __ Push(a0, a1);
- __ CallRuntime(Runtime::kGetConstructorDelegate, 1);
- __ mov(a1, v0);
- __ Pop(a0);
- __ SmiUntag(a0);
- }
- // The delegate is always a regular function.
- __ AssertFunction(a1);
- __ Jump(masm->isolate()->builtins()->CallFunction(),
- RelocInfo::CODE_TARGET);
- }
+ __ bind(&non_function);
+ __ mov(a3, a1);
+ __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}
// -- a0 : number of arguments excluding receiver
// -- a1 : called function (only guaranteed when
// -- extra_args requires it)
- // -- cp : context
// -- sp[0] : last argument
// -- ...
// -- sp[8 * (argc - 1)] : first argument
// -- sp[8 * agrc] : receiver
// -----------------------------------
+ __ AssertFunction(a1);
+
+ // Make sure we operate in the context of the called function (for example
+ // ConstructStubs implemented in C++ will be run in the context of the caller
+ // instead of the callee, due to the way that [[Construct]] is defined for
+ // ordinary functions).
+ // TODO(bmeurer): Can we make this more robust?
+ __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
// Insert extra arguments.
int num_extra_args = 0;
// Called from JSEntryStub::GenerateBody
// ----------- S t a t e -------------
- // -- a0: code entry
+ // -- a0: new.target
// -- a1: function
// -- a2: receiver_pointer
// -- a3: argc
{
FrameScope scope(masm, StackFrame::INTERNAL);
- // Set up the context from the function argument.
- __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
+ // Set up the context (we need to use the caller context from the isolate).
+ ExternalReference context_address(Isolate::kContextAddress,
+ masm->isolate());
+ __ li(cp, Operand(context_address));
+ __ ld(cp, MemOperand(cp));
// Push the function and the receiver onto the stack.
__ Push(a1, a2);
// Clobbers a2.
Generate_CheckStackOverflow(masm, kFunctionOffset, a3, kArgcIsUntaggedInt);
+ // Remember new.target.
+ __ mov(a5, a0);
+
// Copy arguments to the stack in a loop.
// a3: argc
// s0: argv, i.e. points to first arg
__ bind(&entry);
__ Branch(&loop, ne, s0, Operand(a6));
+ // Set up new.target and argc.
+ __ mov(a0, a3);
+ __ mov(a3, a5);
+
// Initialize all JavaScript callee-saved registers, since they will be seen
// by the garbage collector as part of handlers.
__ LoadRoot(a4, Heap::kUndefinedValueRootIndex);
// s6 holds the root address. Do not clobber.
// s7 is cp. Do not init.
- // Invoke the code and pass argc as a0.
- __ mov(a0, a3);
- if (is_construct) {
- // No type feedback cell is available
- __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
- CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
- __ CallStub(&stub);
- } else {
- __ Call(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
- }
+ // Invoke the code.
+ Handle<Code> builtin = is_construct
+ ? masm->isolate()->builtins()->Construct()
+ : masm->isolate()->builtins()->Call();
+ __ Call(builtin, RelocInfo::CODE_TARGET);
// Leave internal frame.
}
}
+void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
+ // ----------- S t a t e -------------
+ // -- a0 : the number of arguments (not including the receiver)
+ // -- a1 : the constructor to call (checked to be a JSFunction)
+ // -- a3 : the original constructor (checked to be a JSFunction)
+ // -----------------------------------
+ __ AssertFunction(a1);
+ __ AssertFunction(a3);
+
+ // The calling convention for function-specific ConstructStubs requires
+ // a2 to contain either an AllocationSite or undefined.
+ __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
+
+ // Tail call to the function-specific construct stub (still in the caller
+ // context at this point).
+ __ ld(a4, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
+ __ ld(a4, FieldMemOperand(a4, SharedFunctionInfo::kConstructStubOffset));
+ __ Daddu(at, a4, Operand(Code::kHeaderSize - kHeapObjectTag));
+ __ Jump(at);
+}
+
+
+// static
+void Builtins::Generate_Construct(MacroAssembler* masm) {
+ // ----------- S t a t e -------------
+ // -- a0 : the number of arguments (not including the receiver)
+ // -- a1 : the constructor to call (can be any Object)
+ // -- a3 : the original constructor (either the same as the constructor or
+ // the JSFunction on which new was invoked initially)
+ // -----------------------------------
+
+ Label slow;
+ __ JumpIfSmi(a1, &slow);
+ __ GetObjectType(a1, a5, a5);
+ __ Jump(masm->isolate()->builtins()->ConstructFunction(),
+ RelocInfo::CODE_TARGET, eq, a5, Operand(JS_FUNCTION_TYPE));
+ __ Branch(&slow, ne, a5, Operand(JS_FUNCTION_PROXY_TYPE));
+
+ // TODO(neis): This doesn't match the ES6 spec for [[Construct]] on proxies.
+ __ ld(a1, FieldMemOperand(a1, JSFunctionProxy::kConstructTrapOffset));
+ __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
+
+ __ bind(&slow);
+ {
+ // Determine the delegate for the target (if any).
+ FrameScope scope(masm, StackFrame::INTERNAL);
+ __ SmiTag(a0);
+ __ Push(a0, a1);
+ __ CallRuntime(Runtime::kGetConstructorDelegate, 1);
+ __ mov(a1, v0);
+ __ Pop(a0);
+ __ SmiUntag(a0);
+ }
+ // The delegate is always a regular function.
+ __ AssertFunction(a1);
+ __ Jump(masm->isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET);
+}
+
+
// static
void Builtins::Generate_PushArgsAndCall(MacroAssembler* masm) {
// ----------- S t a t e -------------
// a2 : feedback vector
// a3 : slot in feedback vector (Smi, for RecordCallTarget)
// a4 : original constructor (for IsSuperConstructorCall)
- Label slow, non_function_call;
+
+ Label non_function;
// Check that the function is not a smi.
- __ JumpIfSmi(a1, &non_function_call);
+ __ JumpIfSmi(a1, &non_function);
// Check that the function is a JSFunction.
__ GetObjectType(a1, a5, a5);
- __ Branch(&slow, ne, a5, Operand(JS_FUNCTION_TYPE));
+ __ Branch(&non_function, ne, a5, Operand(JS_FUNCTION_TYPE));
if (RecordCallTarget()) {
GenerateRecordCallTarget(masm, IsSuperConstructorCall());
__ mov(a3, a1);
}
- // Jump to the function-specific construct stub.
- Register jmp_reg = a4;
- __ ld(jmp_reg, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
- __ ld(jmp_reg, FieldMemOperand(jmp_reg,
- SharedFunctionInfo::kConstructStubOffset));
- __ Daddu(at, jmp_reg, Operand(Code::kHeaderSize - kHeapObjectTag));
+ // Tail call to the function-specific construct stub (still in the caller
+ // context at this point).
+ __ ld(a4, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
+ __ ld(a4, FieldMemOperand(a4, SharedFunctionInfo::kConstructStubOffset));
+ __ Daddu(at, a4, Operand(Code::kHeaderSize - kHeapObjectTag));
__ Jump(at);
- // a0: number of arguments
- // a1: called object
- // a5: object type
- __ bind(&slow);
- {
- __ Branch(&non_function_call, ne, a5, Operand(JS_FUNCTION_PROXY_TYPE));
- // TODO(neis): This doesn't match the ES6 spec for [[Construct]] on proxies.
- __ ld(a1, FieldMemOperand(a1, JSFunctionProxy::kConstructTrapOffset));
- __ Jump(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
-
- __ bind(&non_function_call);
- {
- // Determine the delegate for the target (if any).
- FrameScope scope(masm, StackFrame::INTERNAL);
- __ SmiTag(a0);
- __ Push(a0, a1);
- __ CallRuntime(Runtime::kGetConstructorDelegate, 1);
- __ mov(a1, v0);
- __ Pop(a0);
- __ SmiUntag(a0);
- }
- // The delegate is always a regular function.
- __ AssertFunction(a1);
- __ Jump(masm->isolate()->builtins()->CallFunction(),
- RelocInfo::CODE_TARGET);
- }
+ __ bind(&non_function);
+ __ mov(a3, a1);
+ __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}
base::SmartArrayPointer<Handle<Object>> arguments =
Runtime::GetCallerArguments(isolate, 0, &argument_count);
- // Prepare the array containing all passed arguments.
- Handle<FixedArray> elements =
- isolate->factory()->NewUninitializedFixedArray(argument_count);
- for (int i = 0; i < argument_count; ++i) {
- elements->set(i, *arguments[i]);
- }
- Handle<JSArray> array = isolate->factory()->NewJSArrayWithElements(
- elements, FAST_ELEMENTS, argument_count);
-
- // Call %reflect_construct(<super>, <args>, <new.target>) now.
- Handle<JSFunction> reflect = isolate->reflect_construct();
- Handle<Object> argv[] = {super_constructor, array, original_constructor};
Handle<Object> result;
ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
- isolate, result,
- Execution::Call(isolate, reflect, isolate->factory()->undefined_value(),
- arraysize(argv), argv));
+ isolate, result, Execution::New(super_constructor, original_constructor,
+ argument_count, arguments.get()));
return *result;
}
+
} // namespace internal
} // namespace v8
// -- rax : number of arguments excluding receiver
// -- rdi : called function (only guaranteed when
// extra_args requires it)
- // -- rsi : context
// -- rsp[0] : return address
// -- rsp[8] : last argument
// -- ...
// -- rsp[8 * argc] : first argument (argc == rax)
// -- rsp[8 * (argc + 1)] : receiver
// -----------------------------------
+ __ AssertFunction(rdi);
+
+ // Make sure we operate in the context of the called function (for example
+ // ConstructStubs implemented in C++ will be run in the context of the caller
+ // instead of the callee, due to the way that [[Construct]] is defined for
+ // ordinary functions).
+ // TODO(bmeurer): Can we make this more robust?
+ __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
// Insert extra arguments.
int num_extra_args = 0;
enum IsTagged { kRaxIsSmiTagged, kRaxIsUntaggedInt };
-// Clobbers rcx, rdx, kScratchRegister; preserves all other registers.
+// Clobbers rcx, r11, kScratchRegister; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm,
const int calleeOffset,
IsTagged rax_is_tagged) {
// Make rcx the space we have left. The stack might already be overflowed
// here which will cause rcx to become negative.
__ subp(rcx, kScratchRegister);
- // Make rdx the space we need for the array when it is unrolled onto the
+ // Make r11 the space we need for the array when it is unrolled onto the
// stack.
if (rax_is_tagged == kRaxIsSmiTagged) {
- __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2);
+ __ PositiveSmiTimesPowerOfTwoToInteger64(r11, rax, kPointerSizeLog2);
} else {
DCHECK(rax_is_tagged == kRaxIsUntaggedInt);
- __ movp(rdx, rax);
- __ shlq(rdx, Immediate(kPointerSizeLog2));
+ __ movp(r11, rax);
+ __ shlq(r11, Immediate(kPointerSizeLog2));
}
// Check if the arguments will overflow the stack.
- __ cmpp(rcx, rdx);
+ __ cmpp(rcx, r11);
__ j(greater, &okay); // Signed comparison.
// Out of stack space.
ProfileEntryHookStub::MaybeCallEntryHook(masm);
// Expects five C++ function parameters.
- // - Address entry (ignored)
- // - JSFunction* function (
+ // - Object* new_target
+ // - JSFunction* function
// - Object* receiver
// - int argc
// - Object*** argv
// Platform specific argument handling. After this, the stack contains
// an internal frame and the pushed function and receiver, and
// register rax and rbx holds the argument count and argument array,
- // while rdi holds the function pointer and rsi the context.
+ // while rdi holds the function pointer, rsi the context, and rdx the
+ // new.target.
#ifdef _WIN64
// MSVC parameters in:
- // rcx : entry (ignored)
+ // rcx : new_target
// rdx : function
// r8 : receiver
// r9 : argc
// Clear the context before we push it when entering the internal frame.
__ Set(rsi, 0);
+
// Enter an internal frame.
FrameScope scope(masm, StackFrame::INTERNAL);
- // Load the function context into rsi.
- __ movp(rsi, FieldOperand(rdx, JSFunction::kContextOffset));
+ // Set up the context (we need to use the caller context from the isolate).
+ ExternalReference context_address(Isolate::kContextAddress,
+ masm->isolate());
+ __ movp(rsi, masm->ExternalOperand(context_address));
// Push the function and the receiver onto the stack.
__ Push(rdx);
__ movp(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
// Load the function pointer into rdi.
__ movp(rdi, rdx);
+ // Load the new.target into rdx.
+ __ movp(rdx, rcx);
#else // _WIN64
// GCC parameters in:
- // rdi : entry (ignored)
+ // rdi : new_target
// rsi : function
// rdx : receiver
// rcx : argc
// r8 : argv
+ __ movp(r11, rdi);
__ movp(rdi, rsi);
// rdi : function
+ // r11 : new_target
// Clear the context before we push it when entering the internal frame.
__ Set(rsi, 0);
+
// Enter an internal frame.
FrameScope scope(masm, StackFrame::INTERNAL);
- // Push the function and receiver and setup the context.
+ // Set up the context (we need to use the caller context from the isolate).
+ ExternalReference context_address(Isolate::kContextAddress,
+ masm->isolate());
+ __ movp(rsi, masm->ExternalOperand(context_address));
+
+ // Push the function and receiver onto the stack.
__ Push(rdi);
__ Push(rdx);
- __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
// Load the number of arguments and setup pointer to the arguments.
__ movp(rax, rcx);
__ movp(rbx, r8);
+
+ // Load the new.target into rdx.
+ __ movp(rdx, r11);
#endif // _WIN64
// Current stack contents:
// rbx : argv
// rsi : context
// rdi : function
+ // rdx : new.target
// Check if we have enough stack space to push all arguments.
// The function is the first thing that was pushed above after entering
// the internal frame.
const int kFunctionOffset =
InternalFrameConstants::kCodeOffset - kRegisterSize;
- // Expects argument count in rax. Clobbers rcx, rdx.
+ // Expects argument count in rax. Clobbers rcx, r11.
Generate_CheckStackOverflow(masm, kFunctionOffset, kRaxIsUntaggedInt);
// Copy arguments to the stack in a loop.
// Push the values of these handles.
Label loop, entry;
__ Set(rcx, 0); // Set loop variable to 0.
- __ jmp(&entry);
+ __ jmp(&entry, Label::kNear);
__ bind(&loop);
__ movp(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
__ Push(Operand(kScratchRegister, 0)); // dereference handle
__ cmpp(rcx, rax);
__ j(not_equal, &loop);
- // Invoke the code.
- if (is_construct) {
- // No type feedback cell is available
- __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
- // Expects rdi to hold function pointer.
- CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
- __ CallStub(&stub);
- } else {
- __ Call(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
- }
+ // Invoke the builtin code.
+ Handle<Code> builtin = is_construct
+ ? masm->isolate()->builtins()->Construct()
+ : masm->isolate()->builtins()->Call();
+ __ Call(builtin, RelocInfo::CODE_TARGET);
+
// Exit the internal frame. Notice that this also removes the empty
// context and the function left on the stack by the code
// invocation.
void Builtins::Generate_Call(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- rax : the number of arguments (not including the receiver)
- // -- rdi : the target to call (can be any Object).
+ // -- rdi : the target to call (can be any Object)
// -----------------------------------
Label non_smi, non_function;
// static
+void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
+ // ----------- S t a t e -------------
+ // -- rax : the number of arguments (not including the receiver)
+ // -- rdx : the original constructor (checked to be a JSFunction)
+ // -- rdi : the constructor to call (checked to be a JSFunction)
+ // -----------------------------------
+ __ AssertFunction(rdx);
+ __ AssertFunction(rdi);
+
+ // The calling convention for function-specific ConstructStubs requires
+ // rbx to contain either an AllocationSite or undefined.
+ __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
+
+ // Tail call to the function-specific construct stub (still in the caller
+ // context at this point).
+ __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
+ __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kConstructStubOffset));
+ __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize));
+ __ jmp(rcx);
+}
+
+
+// static
+void Builtins::Generate_Construct(MacroAssembler* masm) {
+ // ----------- S t a t e -------------
+ // -- rax : the number of arguments (not including the receiver)
+ // -- rdx : the original constructor (either the same as the constructor or
+ // the JSFunction on which new was invoked initially)
+ // -- rdi : the constructor to call (can be any Object)
+ // -----------------------------------
+
+ Label slow;
+ __ JumpIfSmi(rdi, &slow, Label::kNear);
+ __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
+ __ j(equal, masm->isolate()->builtins()->ConstructFunction(),
+ RelocInfo::CODE_TARGET);
+ __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE);
+ __ j(not_equal, &slow, Label::kNear);
+
+ // TODO(neis): This doesn't match the ES6 spec for [[Construct]] on proxies.
+ __ movp(rdi, FieldOperand(rdi, JSFunctionProxy::kConstructTrapOffset));
+ __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
+
+ __ bind(&slow);
+ {
+ // Determine the delegate for the target (if any).
+ FrameScope scope(masm, StackFrame::INTERNAL);
+ __ Integer32ToSmi(rax, rax);
+ __ Push(rax);
+ __ Push(rdi);
+ __ CallRuntime(Runtime::kGetConstructorDelegate, 1);
+ __ movp(rdi, rax);
+ __ Pop(rax);
+ __ SmiToInteger32(rax, rax);
+ }
+ // The delegate is always a regular function.
+ __ AssertFunction(rdi);
+ __ Jump(masm->isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET);
+}
+
+
+// static
void Builtins::Generate_PushArgsAndCall(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- rax : the number of arguments (not including the receiver)
// rcx : original constructor (for IsSuperConstructorCall)
// rdx : slot in feedback vector (Smi, for RecordCallTarget)
// rdi : constructor function
- Label slow, non_function_call;
- // Check that function is not a smi.
- __ JumpIfSmi(rdi, &non_function_call);
- // Check that function is a JSFunction.
+ Label non_function;
+ // Check that the constructor is not a smi.
+ __ JumpIfSmi(rdi, &non_function);
+ // Check that constructor is a JSFunction.
__ CmpObjectType(rdi, JS_FUNCTION_TYPE, r11);
- __ j(not_equal, &slow);
+ __ j(not_equal, &non_function);
if (RecordCallTarget()) {
GenerateRecordCallTarget(masm, IsSuperConstructorCall());
__ movp(rdx, rdi);
}
- // Jump to the function-specific construct stub.
- Register jmp_reg = rcx;
- __ movp(jmp_reg, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
- __ movp(jmp_reg, FieldOperand(jmp_reg,
- SharedFunctionInfo::kConstructStubOffset));
- __ leap(jmp_reg, FieldOperand(jmp_reg, Code::kHeaderSize));
- __ jmp(jmp_reg);
-
- // rdi: called object
- // rax: number of arguments
- // r11: object map
- __ bind(&slow);
- {
- __ CmpInstanceType(r11, JS_FUNCTION_PROXY_TYPE);
- __ j(not_equal, &non_function_call, Label::kNear);
-
- // TODO(neis): This doesn't match the ES6 spec for [[Construct]] on proxies.
- __ movp(rdi, FieldOperand(rdi, JSFunctionProxy::kConstructTrapOffset));
- __ Jump(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
+ // Tail call to the function-specific construct stub (still in the caller
+ // context at this point).
+ __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
+ __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kConstructStubOffset));
+ __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize));
+ __ jmp(rcx);
- __ bind(&non_function_call);
- {
- // Determine the delegate for the target (if any).
- FrameScope scope(masm, StackFrame::INTERNAL);
- __ Integer32ToSmi(rax, rax);
- __ Push(rax);
- __ Push(rdi);
- __ CallRuntime(Runtime::kGetConstructorDelegate, 1);
- __ movp(rdi, rax);
- __ Pop(rax);
- __ SmiToInteger32(rax, rax);
- }
- // The delegate is always a regular function.
- __ AssertFunction(rdi);
- __ Jump(isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET);
- }
+ __ bind(&non_function);
+ __ movp(rdx, rdi);
+ __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}