From: bmeurer Date: Tue, 22 Sep 2015 04:27:14 +0000 (-0700) Subject: [builtins] Add support for NewTarget to Execution::New. X-Git-Tag: upstream/4.7.83~183 X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=1dfac69f1fb5ad8e2db2d5be591c094def55d451;p=platform%2Fupstream%2Fv8.git [builtins] Add support for NewTarget to Execution::New. Introduce new builtins Construct and ConstructFunction (in line with the Call and CallFunction builtins that we already have) as proper bottleneck for Construct and [[Construct]] on JSFunctions. Use these builtins to support passing NewTarget from C++ to JavaScript land. Long-term we want the CallConstructStub to be used for gathering feedback on entry to construction chain (i.e. the initial new Foo), and use the Construct builtins to do the actual work inside the construction chain (i.e. calling into super and stuff). MIPS and MIPS64 ports contributed by akos.palfi@imgtec.com. R=jarin@chromium.org BUG=v8:4430 LOG=n Review URL: https://codereview.chromium.org/1359583002 Cr-Commit-Position: refs/heads/master@{#30857} --- diff --git a/src/arm/builtins-arm.cc b/src/arm/builtins-arm.cc index 041009387..9b87243a4 100644 --- a/src/arm/builtins-arm.cc +++ b/src/arm/builtins-arm.cc @@ -24,12 +24,19 @@ void Builtins::Generate_Adaptor(MacroAssembler* masm, // -- r0 : number of arguments excluding receiver // -- r1 : called function (only guaranteed when // extra_args requires it) - // -- cp : context // -- sp[0] : last argument // -- ... // -- sp[4 * (argc - 1)] : first argument (argc == r0) // -- sp[4 * argc] : receiver // ----------------------------------- + __ AssertFunction(r1); + + // Make sure we operate in the context of the called function (for example + // ConstructStubs implemented in C++ will be run in the context of the caller + // instead of the callee, due to the way that [[Construct]] is defined for + // ordinary functions). + // TODO(bmeurer): Can we make this more robust? 
+ __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset)); // Insert extra arguments. int num_extra_args = 0; @@ -730,7 +737,7 @@ static void Generate_CheckStackOverflow(MacroAssembler* masm, static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, bool is_construct) { // Called from Generate_JS_Entry - // r0: code entry + // r0: new.target // r1: function // r2: receiver // r3: argc @@ -745,14 +752,16 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, { FrameScope scope(masm, StackFrame::INTERNAL); - // Set up the context from the function argument. - __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset)); + // Setup the context (we need to use the caller context from the isolate). + ExternalReference context_address(Isolate::kContextAddress, + masm->isolate()); + __ mov(cp, Operand(context_address)); + __ ldr(cp, MemOperand(cp)); __ InitializeRootRegister(); // Push the function and the receiver onto the stack. - __ push(r1); - __ push(r2); + __ Push(r1, r2); // Check if we have enough stack space to push all arguments. // The function is the first thing that was pushed above after entering @@ -762,6 +771,9 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, // Clobbers r2. Generate_CheckStackOverflow(masm, kFunctionOffset, r3, kArgcIsUntaggedInt); + // Remember new.target. + __ mov(r5, r0); + // Copy arguments to the stack in a loop. // r1: function // r3: argc @@ -778,6 +790,10 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, __ cmp(r4, r2); __ b(ne, &loop); + // Setup new.target and argc. + __ mov(r0, Operand(r3)); + __ mov(r3, Operand(r5)); + // Initialize all JavaScript callee-saved registers, since they will be seen // by the garbage collector as part of handlers. __ LoadRoot(r4, Heap::kUndefinedValueRootIndex); @@ -790,16 +806,12 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, __ mov(r9, Operand(r4)); } - // Invoke the code and pass argc as r0. 
- __ mov(r0, Operand(r3)); - if (is_construct) { - // No type feedback cell is available - __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); - CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS); - __ CallStub(&stub); - } else { - __ Call(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); - } + // Invoke the code. + Handle builtin = is_construct + ? masm->isolate()->builtins()->Construct() + : masm->isolate()->builtins()->Call(); + __ Call(builtin, RelocInfo::CODE_TARGET); + // Exit the JS frame and remove the parameters (except function), and // return. // Respect ABI stack constraint. @@ -1615,6 +1627,66 @@ void Builtins::Generate_Call(MacroAssembler* masm) { } +// static +void Builtins::Generate_ConstructFunction(MacroAssembler* masm) { + // ----------- S t a t e ------------- + // -- r0 : the number of arguments (not including the receiver) + // -- r1 : the constructor to call (checked to be a JSFunction) + // -- r3 : the original constructor (checked to be a JSFunction) + // ----------------------------------- + __ AssertFunction(r1); + __ AssertFunction(r3); + + // Calling convention for function specific ConstructStubs require + // r2 to contain either an AllocationSite or undefined. + __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); + + // Tail call to the function-specific construct stub (still in the caller + // context at this point). 
+ __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); + __ ldr(r4, FieldMemOperand(r4, SharedFunctionInfo::kConstructStubOffset)); + __ add(pc, r4, Operand(Code::kHeaderSize - kHeapObjectTag)); +} + + +// static +void Builtins::Generate_Construct(MacroAssembler* masm) { + // ----------- S t a t e ------------- + // -- r0 : the number of arguments (not including the receiver) + // -- r1 : the constructor to call (can be any Object) + // -- r3 : the original constructor (either the same as the constructor or + // the JSFunction on which new was invoked initially) + // ----------------------------------- + + Label slow; + __ JumpIfSmi(r1, &slow); + __ CompareObjectType(r1, r5, r5, JS_FUNCTION_TYPE); + __ Jump(masm->isolate()->builtins()->ConstructFunction(), + RelocInfo::CODE_TARGET, eq); + __ cmp(r5, Operand(JS_FUNCTION_PROXY_TYPE)); + __ b(ne, &slow); + + // TODO(neis): This doesn't match the ES6 spec for [[Construct]] on proxies. + __ ldr(r1, FieldMemOperand(r1, JSFunctionProxy::kConstructTrapOffset)); + __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); + + __ bind(&slow); + { + // Determine the delegate for the target (if any). + FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); + __ SmiTag(r0); + __ Push(r0, r1); + __ CallRuntime(Runtime::kGetConstructorDelegate, 1); + __ mov(r1, r0); + __ Pop(r0); + __ SmiUntag(r0); + } + // The delegate is always a regular function. 
+ __ AssertFunction(r1); + __ Jump(masm->isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET); +} + + // static void Builtins::Generate_PushArgsAndCall(MacroAssembler* masm) { // ----------- S t a t e ------------- diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc index 154db4f24..ab5a89fbb 100644 --- a/src/arm/code-stubs-arm.cc +++ b/src/arm/code-stubs-arm.cc @@ -2470,13 +2470,13 @@ void CallConstructStub::Generate(MacroAssembler* masm) { // r2 : feedback vector // r3 : slot in feedback vector (Smi, for RecordCallTarget) // r4 : original constructor (for IsSuperConstructorCall) - Label slow, non_function_call; + Label non_function; // Check that the function is not a smi. - __ JumpIfSmi(r1, &non_function_call); + __ JumpIfSmi(r1, &non_function); // Check that the function is a JSFunction. __ CompareObjectType(r1, r5, r5, JS_FUNCTION_TYPE); - __ b(ne, &slow); + __ b(ne, &non_function); if (RecordCallTarget()) { GenerateRecordCallTarget(masm, IsSuperConstructorCall()); @@ -2501,40 +2501,15 @@ void CallConstructStub::Generate(MacroAssembler* masm) { __ mov(r3, r1); } - // Jump to the function-specific construct stub. - Register jmp_reg = r4; - __ ldr(jmp_reg, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); - __ ldr(jmp_reg, FieldMemOperand(jmp_reg, - SharedFunctionInfo::kConstructStubOffset)); - __ add(pc, jmp_reg, Operand(Code::kHeaderSize - kHeapObjectTag)); - - // r0: number of arguments - // r1: called object - // r5: object type - __ bind(&slow); - { - __ cmp(r5, Operand(JS_FUNCTION_PROXY_TYPE)); - __ b(ne, &non_function_call); - // TODO(neis): This doesn't match the ES6 spec for [[Construct]] on proxies. - __ ldr(r1, FieldMemOperand(r1, JSFunctionProxy::kConstructTrapOffset)); - __ Jump(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); + // Tail call to the function-specific construct stub (still in the caller + // context at this point). 
+ __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); + __ ldr(r4, FieldMemOperand(r4, SharedFunctionInfo::kConstructStubOffset)); + __ add(pc, r4, Operand(Code::kHeaderSize - kHeapObjectTag)); - __ bind(&non_function_call); - { - // Determine the delegate for the target (if any). - FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); - __ SmiTag(r0); - __ Push(r0, r1); - __ CallRuntime(Runtime::kGetConstructorDelegate, 1); - __ mov(r1, r0); - __ Pop(r0); - __ SmiUntag(r0); - } - // The delegate is always a regular function. - __ AssertFunction(r1); - __ Jump(masm->isolate()->builtins()->CallFunction(), - RelocInfo::CODE_TARGET); - } + __ bind(&non_function); + __ mov(r3, r1); + __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); } diff --git a/src/arm64/builtins-arm64.cc b/src/arm64/builtins-arm64.cc index 01eef0506..7a21635db 100644 --- a/src/arm64/builtins-arm64.cc +++ b/src/arm64/builtins-arm64.cc @@ -51,12 +51,19 @@ void Builtins::Generate_Adaptor(MacroAssembler* masm, // -- x0 : number of arguments excluding receiver // -- x1 : called function (only guaranteed when // extra_args requires it) - // -- cp : context // -- sp[0] : last argument // -- ... // -- sp[4 * (argc - 1)] : first argument (argc == x0) // -- sp[4 * argc] : receiver // ----------------------------------- + __ AssertFunction(x1); + + // Make sure we operate in the context of the called function (for example + // ConstructStubs implemented in C++ will be run in the context of the caller + // instead of the callee, due to the way that [[Construct]] is defined for + // ordinary functions). + // TODO(bmeurer): Can we make this more robust? + __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset)); // Insert extra arguments. int num_extra_args = 0; @@ -754,7 +761,7 @@ static void Generate_CheckStackOverflow(MacroAssembler* masm, // Input: -// x0: code entry. +// x0: new.target. // x1: function. // x2: receiver. // x3: argc. 
@@ -764,10 +771,12 @@ static void Generate_CheckStackOverflow(MacroAssembler* masm, static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, bool is_construct) { // Called from JSEntryStub::GenerateBody(). + Register new_target = x0; Register function = x1; Register receiver = x2; Register argc = x3; Register argv = x4; + Register scratch = x10; ProfileEntryHookStub::MaybeCallEntryHook(masm); @@ -778,8 +787,10 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, // Enter an internal frame. FrameScope scope(masm, StackFrame::INTERNAL); - // Set up the context from the function argument. - __ Ldr(cp, FieldMemOperand(function, JSFunction::kContextOffset)); + // Setup the context (we need to use the caller context from the isolate). + __ Mov(scratch, Operand(ExternalReference(Isolate::kContextAddress, + masm->isolate()))); + __ Ldr(cp, MemOperand(scratch)); __ InitializeRootRegister(); @@ -800,7 +811,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, // x4: argv. Label loop, entry; // Compute the copy end address. - __ Add(x10, argv, Operand(argc, LSL, kPointerSizeLog2)); + __ Add(scratch, argv, Operand(argc, LSL, kPointerSizeLog2)); __ B(&entry); __ Bind(&loop); @@ -808,9 +819,15 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, __ Ldr(x12, MemOperand(x11)); // Dereference the handle. __ Push(x12); // Push the argument. __ Bind(&entry); - __ Cmp(x10, argv); + __ Cmp(scratch, argv); __ B(ne, &loop); + __ Mov(scratch, argc); + __ Mov(argc, new_target); + __ Mov(new_target, scratch); + // x0: argc. + // x3: new.target. + // Initialize all JavaScript callee-saved registers, since they will be seen // by the garbage collector as part of handlers. // The original values have been saved in JSEntryStub::GenerateBody(). @@ -827,16 +844,11 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, // x28 : JS stack pointer (jssp). // x29 : frame pointer (fp). 
- __ Mov(x0, argc); - if (is_construct) { - // No type feedback cell is available. - __ LoadRoot(x2, Heap::kUndefinedValueRootIndex); + Handle builtin = is_construct + ? masm->isolate()->builtins()->Construct() + : masm->isolate()->builtins()->Call(); + __ Call(builtin, RelocInfo::CODE_TARGET); - CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS); - __ CallStub(&stub); - } else { - __ Call(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); - } // Exit the JS internal frame and remove the parameters (except function), // and return. } @@ -1678,6 +1690,67 @@ void Builtins::Generate_Call(MacroAssembler* masm) { } +// static +void Builtins::Generate_ConstructFunction(MacroAssembler* masm) { + // ----------- S t a t e ------------- + // -- x0 : the number of arguments (not including the receiver) + // -- x1 : the constructor to call (checked to be a JSFunction) + // -- x3 : the original constructor (checked to be a JSFunction) + // ----------------------------------- + __ AssertFunction(x1); + __ AssertFunction(x3); + + // Calling convention for function specific ConstructStubs require + // x2 to contain either an AllocationSite or undefined. + __ LoadRoot(x2, Heap::kUndefinedValueRootIndex); + + // Tail call to the function-specific construct stub (still in the caller + // context at this point). 
+ __ Ldr(x4, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset)); + __ Ldr(x4, FieldMemOperand(x4, SharedFunctionInfo::kConstructStubOffset)); + __ Add(x4, x4, Code::kHeaderSize - kHeapObjectTag); + __ Br(x4); +} + + +// static +void Builtins::Generate_Construct(MacroAssembler* masm) { + // ----------- S t a t e ------------- + // -- x0 : the number of arguments (not including the receiver) + // -- x1 : the constructor to call (can be any Object) + // -- x3 : the original constructor (either the same as the constructor or + // the JSFunction on which new was invoked initially) + // ----------------------------------- + + Label slow; + __ JumpIfSmi(x1, &slow); + __ CompareObjectType(x1, x5, x5, JS_FUNCTION_TYPE); + __ Jump(masm->isolate()->builtins()->ConstructFunction(), + RelocInfo::CODE_TARGET, eq); + __ Cmp(x5, Operand(JS_FUNCTION_PROXY_TYPE)); + __ B(ne, &slow); + + // TODO(neis): This doesn't match the ES6 spec for [[Construct]] on proxies. + __ Ldr(x1, FieldMemOperand(x1, JSFunctionProxy::kConstructTrapOffset)); + __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); + + __ Bind(&slow); + { + // Determine the delegate for the target (if any). + FrameScope scope(masm, StackFrame::INTERNAL); + __ SmiTag(x0); + __ Push(x0, x1); + __ CallRuntime(Runtime::kGetConstructorDelegate, 1); + __ Mov(x1, x0); + __ Pop(x0); + __ SmiUntag(x0); + } + // The delegate is always a regular function. 
+ __ AssertFunction(x1); + __ Jump(masm->isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET); +} + + // static void Builtins::Generate_PushArgsAndCall(MacroAssembler* masm) { // ----------- S t a t e ------------- diff --git a/src/arm64/code-stubs-arm64.cc b/src/arm64/code-stubs-arm64.cc index 9def80315..5ba08f76d 100644 --- a/src/arm64/code-stubs-arm64.cc +++ b/src/arm64/code-stubs-arm64.cc @@ -2841,14 +2841,14 @@ void CallConstructStub::Generate(MacroAssembler* masm) { // x3 : slot in feedback vector (Smi, for RecordCallTarget) // x4 : original constructor (for IsSuperConstructorCall) Register function = x1; - Label slow, non_function_call; + Label non_function; // Check that the function is not a smi. - __ JumpIfSmi(function, &non_function_call); + __ JumpIfSmi(function, &non_function); // Check that the function is a JSFunction. Register object_type = x10; __ JumpIfNotObjectType(function, object_type, object_type, JS_FUNCTION_TYPE, - &slow); + &non_function); if (RecordCallTarget()) { GenerateRecordCallTarget(masm, x0, function, x2, x3, x4, x5, x11, x12, @@ -2873,43 +2873,16 @@ void CallConstructStub::Generate(MacroAssembler* masm) { __ Mov(x3, function); } - // Jump to the function-specific construct stub. - Register jump_reg = x4; - Register shared_func_info = jump_reg; - Register cons_stub = jump_reg; - Register cons_stub_code = jump_reg; - __ Ldr(shared_func_info, - FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset)); - __ Ldr(cons_stub, - FieldMemOperand(shared_func_info, - SharedFunctionInfo::kConstructStubOffset)); - __ Add(cons_stub_code, cons_stub, Code::kHeaderSize - kHeapObjectTag); - __ Br(cons_stub_code); - - __ Bind(&slow); - { - __ Cmp(object_type, JS_FUNCTION_PROXY_TYPE); - __ B(ne, &non_function_call); - // TODO(neis): This doesn't match the ES6 spec for [[Construct]] on proxies. 
- __ Ldr(x1, FieldMemOperand(x1, JSFunctionProxy::kConstructTrapOffset)); - __ Jump(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); + // Tail call to the function-specific construct stub (still in the caller + // context at this point). + __ Ldr(x4, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset)); + __ Ldr(x4, FieldMemOperand(x4, SharedFunctionInfo::kConstructStubOffset)); + __ Add(x4, x4, Code::kHeaderSize - kHeapObjectTag); + __ Br(x4); - __ Bind(&non_function_call); - { - // Determine the delegate for the target (if any). - FrameScope scope(masm, StackFrame::INTERNAL); - __ SmiTag(x0); - __ Push(x0, x1); - __ CallRuntime(Runtime::kGetConstructorDelegate, 1); - __ Mov(x1, x0); - __ Pop(x0); - __ SmiUntag(x0); - } - // The delegate is always a regular function. - __ AssertFunction(x1); - __ Jump(masm->isolate()->builtins()->CallFunction(), - RelocInfo::CODE_TARGET); - } + __ Bind(&non_function); + __ Mov(x3, function); + __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); } diff --git a/src/arm64/macro-assembler-arm64.cc b/src/arm64/macro-assembler-arm64.cc index c04cc83bf..3c42194e0 100644 --- a/src/arm64/macro-assembler-arm64.cc +++ b/src/arm64/macro-assembler-arm64.cc @@ -1877,24 +1877,31 @@ void MacroAssembler::Jump(Register target) { } -void MacroAssembler::Jump(intptr_t target, RelocInfo::Mode rmode) { +void MacroAssembler::Jump(intptr_t target, RelocInfo::Mode rmode, + Condition cond) { + if (cond == nv) return; UseScratchRegisterScope temps(this); Register temp = temps.AcquireX(); + Label done; + if (cond != al) B(NegateCondition(cond), &done); Mov(temp, Operand(target, rmode)); Br(temp); + Bind(&done); } -void MacroAssembler::Jump(Address target, RelocInfo::Mode rmode) { +void MacroAssembler::Jump(Address target, RelocInfo::Mode rmode, + Condition cond) { DCHECK(!RelocInfo::IsCodeTarget(rmode)); - Jump(reinterpret_cast(target), rmode); + Jump(reinterpret_cast(target), rmode, cond); } -void MacroAssembler::Jump(Handle 
code, RelocInfo::Mode rmode) { +void MacroAssembler::Jump(Handle code, RelocInfo::Mode rmode, + Condition cond) { DCHECK(RelocInfo::IsCodeTarget(rmode)); AllowDeferredHandleDereference embedding_raw_address; - Jump(reinterpret_cast(code.location()), rmode); + Jump(reinterpret_cast(code.location()), rmode, cond); } diff --git a/src/arm64/macro-assembler-arm64.h b/src/arm64/macro-assembler-arm64.h index f5731cf04..95068d2d5 100644 --- a/src/arm64/macro-assembler-arm64.h +++ b/src/arm64/macro-assembler-arm64.h @@ -1149,9 +1149,9 @@ class MacroAssembler : public Assembler { void GetBuiltinFunction(Register target, int native_context_index); void Jump(Register target); - void Jump(Address target, RelocInfo::Mode rmode); - void Jump(Handle code, RelocInfo::Mode rmode); - void Jump(intptr_t target, RelocInfo::Mode rmode); + void Jump(Address target, RelocInfo::Mode rmode, Condition cond = al); + void Jump(Handle code, RelocInfo::Mode rmode, Condition cond = al); + void Jump(intptr_t target, RelocInfo::Mode rmode, Condition cond = al); void Call(Register target); void Call(Label* target); diff --git a/src/arm64/simulator-arm64.cc b/src/arm64/simulator-arm64.cc index 83fd164bb..f4e483915 100644 --- a/src/arm64/simulator-arm64.cc +++ b/src/arm64/simulator-arm64.cc @@ -177,13 +177,13 @@ double Simulator::CallDouble(byte* entry, CallArgument* args) { int64_t Simulator::CallJS(byte* entry, - byte* function_entry, + Object* new_target, JSFunction* func, Object* revc, int64_t argc, Object*** argv) { CallArgument args[] = { - CallArgument(function_entry), + CallArgument(new_target), CallArgument(func), CallArgument(revc), CallArgument(argc), diff --git a/src/arm64/simulator-arm64.h b/src/arm64/simulator-arm64.h index d456bd9ed..cd734ad29 100644 --- a/src/arm64/simulator-arm64.h +++ b/src/arm64/simulator-arm64.h @@ -187,7 +187,7 @@ class Simulator : public DecoderVisitor { // generated RegExp code with 10 parameters. 
These are convenience functions, // which set up the simulator state and grab the result on return. int64_t CallJS(byte* entry, - byte* function_entry, + Object* new_target, JSFunction* func, Object* revc, int64_t argc, diff --git a/src/builtins.h b/src/builtins.h index f2b6b1e5d..56f2db91e 100644 --- a/src/builtins.h +++ b/src/builtins.h @@ -77,6 +77,9 @@ enum BuiltinExtraArguments { V(CallFunction, BUILTIN, UNINITIALIZED, kNoExtraICState) \ V(Call, BUILTIN, UNINITIALIZED, kNoExtraICState) \ \ + V(ConstructFunction, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + V(Construct, BUILTIN, UNINITIALIZED, kNoExtraICState) \ + \ V(PushArgsAndCall, BUILTIN, UNINITIALIZED, kNoExtraICState) \ \ V(InOptimizationQueue, BUILTIN, UNINITIALIZED, kNoExtraICState) \ @@ -276,6 +279,11 @@ class Builtins { // ES6 section 7.3.12 Call(F, V, [argumentsList]) static void Generate_Call(MacroAssembler* masm); + // ES6 section 9.2.2 [[Construct]] ( argumentsList, newTarget) + static void Generate_ConstructFunction(MacroAssembler* masm); + // ES6 section 7.3.13 Construct (F, [argumentsList], [newTarget]) + static void Generate_Construct(MacroAssembler* masm); + static void Generate_PushArgsAndCall(MacroAssembler* masm); static void Generate_FunctionCall(MacroAssembler* masm); diff --git a/src/execution.cc b/src/execution.cc index ea1190efc..c214e7c4e 100644 --- a/src/execution.cc +++ b/src/execution.cc @@ -58,7 +58,8 @@ namespace { MUST_USE_RESULT MaybeHandle Invoke(bool is_construct, Handle function, Handle receiver, int argc, - Handle args[]) { + Handle args[], + Handle new_target) { Isolate* const isolate = function->GetIsolate(); // Convert calls on global objects to be calls on the global @@ -108,10 +109,8 @@ MUST_USE_RESULT MaybeHandle Invoke(bool is_construct, // Placeholder for return value. 
Object* value = NULL; - typedef Object* (*JSEntryFunction)(byte* entry, - Object* function, - Object* receiver, - int argc, + typedef Object* (*JSEntryFunction)(Object* new_target, Object* function, + Object* receiver, int argc, Object*** args); Handle code = is_construct @@ -130,12 +129,12 @@ MUST_USE_RESULT MaybeHandle Invoke(bool is_construct, JSEntryFunction stub_entry = FUNCTION_CAST(code->entry()); // Call the function through the right JS entry stub. - byte* ignored = nullptr; // TODO(bmeurer): Remove this altogether. + Object* orig_func = *new_target; JSFunction* func = *function; Object* recv = *receiver; Object*** argv = reinterpret_cast(args); if (FLAG_profile_deserialization) PrintDeserializedCodeInfo(function); - value = CALL_GENERATED_CODE(stub_entry, ignored, func, recv, argc, argv); + value = CALL_GENERATED_CODE(stub_entry, orig_func, func, recv, argc, argv); } #ifdef VERIFY_HEAP @@ -172,15 +171,22 @@ MaybeHandle Execution::Call(Isolate* isolate, Handle callable, GetFunctionDelegate(isolate, callable), Object); } Handle func = Handle::cast(callable); + return Invoke(false, func, receiver, argc, argv, + isolate->factory()->undefined_value()); +} + - return Invoke(false, func, receiver, argc, argv); +MaybeHandle Execution::New(Handle constructor, int argc, + Handle argv[]) { + return New(constructor, constructor, argc, argv); } -MaybeHandle Execution::New(Handle func, - int argc, +MaybeHandle Execution::New(Handle constructor, + Handle new_target, int argc, Handle argv[]) { - return Invoke(true, func, handle(func->global_proxy()), argc, argv); + return Invoke(true, constructor, handle(constructor->global_proxy()), argc, + argv, new_target); } diff --git a/src/execution.h b/src/execution.h index 51fe3d32c..5c25082a6 100644 --- a/src/execution.h +++ b/src/execution.h @@ -31,13 +31,12 @@ class Execution final : public AllStatic { Handle argv[]); // Construct object from function, the caller supplies an array of - // arguments. Arguments are Object* type. 
After function returns, - // pointers in 'args' might be invalid. - // - // *pending_exception tells whether the invoke resulted in - // a pending exception. - // - MUST_USE_RESULT static MaybeHandle New(Handle func, + // arguments. + MUST_USE_RESULT static MaybeHandle New(Handle constructor, + int argc, + Handle argv[]); + MUST_USE_RESULT static MaybeHandle New(Handle constructor, + Handle new_target, int argc, Handle argv[]); diff --git a/src/full-codegen/arm/full-codegen-arm.cc b/src/full-codegen/arm/full-codegen-arm.cc index 4470387b2..6fb5e218e 100644 --- a/src/full-codegen/arm/full-codegen-arm.cc +++ b/src/full-codegen/arm/full-codegen-arm.cc @@ -4013,14 +4013,14 @@ void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) { VisitForStackValue(args->at(0)); VisitForStackValue(args->at(1)); - // Load original constructor into r4. - __ ldr(r4, MemOperand(sp, 1 * kPointerSize)); + // Load original constructor into r3. + __ ldr(r3, MemOperand(sp, 1 * kPointerSize)); // Check if the calling frame is an arguments adaptor frame. Label adaptor_frame, args_set_up, runtime; __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); - __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset)); - __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); + __ ldr(r4, MemOperand(r2, StandardFrameConstants::kContextOffset)); + __ cmp(r4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); __ b(eq, &adaptor_frame); // default constructor has no arguments, so no adaptor frame means no args. __ mov(r0, Operand::Zero()); @@ -4040,8 +4040,8 @@ void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) { __ bind(&loop); // Pre-decrement r2 with kPointerSize on each iteration. // Pre-decrement in order to skip receiver. 
- __ ldr(r3, MemOperand(r2, kPointerSize, NegPreIndex)); - __ Push(r3); + __ ldr(r4, MemOperand(r2, kPointerSize, NegPreIndex)); + __ Push(r4); __ sub(r1, r1, Operand(1)); __ cmp(r1, Operand::Zero()); __ b(ne, &loop); @@ -4049,10 +4049,7 @@ void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) { __ bind(&args_set_up); __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2)); - __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); - - CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL); - __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL); + __ Call(isolate()->builtins()->Construct(), RelocInfo::CONSTRUCT_CALL); // Restore context register. __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); diff --git a/src/full-codegen/arm64/full-codegen-arm64.cc b/src/full-codegen/arm64/full-codegen-arm64.cc index 64c634a66..5b6fd81b9 100644 --- a/src/full-codegen/arm64/full-codegen-arm64.cc +++ b/src/full-codegen/arm64/full-codegen-arm64.cc @@ -3729,8 +3729,8 @@ void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) { VisitForStackValue(args->at(0)); VisitForStackValue(args->at(1)); - // Load original constructor into x4. - __ Peek(x4, 1 * kPointerSize); + // Load original constructor into x3. + __ Peek(x3, 1 * kPointerSize); // Check if the calling frame is an arguments adaptor frame. Label adaptor_frame, args_set_up, runtime; @@ -3765,10 +3765,7 @@ void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) { __ bind(&args_set_up); __ Peek(x1, Operand(x0, LSL, kPointerSizeLog2)); - __ LoadRoot(x2, Heap::kUndefinedValueRootIndex); - - CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL); - __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL); + __ Call(isolate()->builtins()->Construct(), RelocInfo::CONSTRUCT_CALL); // Restore context register. 
__ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); diff --git a/src/full-codegen/ia32/full-codegen-ia32.cc b/src/full-codegen/ia32/full-codegen-ia32.cc index 285975cbd..24a1113d8 100644 --- a/src/full-codegen/ia32/full-codegen-ia32.cc +++ b/src/full-codegen/ia32/full-codegen-ia32.cc @@ -3916,9 +3916,6 @@ void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) { VisitForStackValue(args->at(0)); VisitForStackValue(args->at(1)); - // Load original constructor into ecx. - __ mov(ecx, Operand(esp, 1 * kPointerSize)); - // Check if the calling frame is an arguments adaptor frame. Label adaptor_frame, args_set_up, runtime; __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset)); @@ -3948,10 +3945,9 @@ void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) { __ bind(&args_set_up); - __ mov(edi, Operand(esp, eax, times_pointer_size, 0)); - __ mov(ebx, Immediate(isolate()->factory()->undefined_value())); - CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL); - __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL); + __ mov(edx, Operand(esp, eax, times_pointer_size, 1 * kPointerSize)); + __ mov(edi, Operand(esp, eax, times_pointer_size, 0 * kPointerSize)); + __ Call(isolate()->builtins()->Construct(), RelocInfo::CONSTRUCT_CALL); // Restore context register. __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); diff --git a/src/full-codegen/mips/full-codegen-mips.cc b/src/full-codegen/mips/full-codegen-mips.cc index c29404d1f..e46a6b284 100644 --- a/src/full-codegen/mips/full-codegen-mips.cc +++ b/src/full-codegen/mips/full-codegen-mips.cc @@ -4036,14 +4036,14 @@ void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) { VisitForStackValue(args->at(0)); VisitForStackValue(args->at(1)); - // Load original constructor into t0. - __ lw(t0, MemOperand(sp, 1 * kPointerSize)); + // Load original constructor into a3. 
+ __ lw(a3, MemOperand(sp, 1 * kPointerSize)); // Check if the calling frame is an arguments adaptor frame. Label adaptor_frame, args_set_up, runtime; __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); - __ lw(a3, MemOperand(a2, StandardFrameConstants::kContextOffset)); - __ Branch(&adaptor_frame, eq, a3, + __ lw(t0, MemOperand(a2, StandardFrameConstants::kContextOffset)); + __ Branch(&adaptor_frame, eq, t0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); // default constructor has no arguments, so no adaptor frame means no args. __ mov(a0, zero_reg); @@ -4066,8 +4066,8 @@ void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) { // Pre-decrement a2 with kPointerSize on each iteration. // Pre-decrement in order to skip receiver. __ Addu(a2, a2, Operand(-kPointerSize)); - __ lw(a3, MemOperand(a2)); - __ Push(a3); + __ lw(t0, MemOperand(a2)); + __ Push(t0); __ Addu(a1, a1, Operand(-1)); __ Branch(&loop, ne, a1, Operand(zero_reg)); } @@ -4076,10 +4076,7 @@ void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) { __ sll(at, a0, kPointerSizeLog2); __ Addu(at, at, Operand(sp)); __ lw(a1, MemOperand(at, 0)); - __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); - - CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL); - __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL); + __ Call(isolate()->builtins()->Construct(), RelocInfo::CONSTRUCT_CALL); // Restore context register. __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); diff --git a/src/full-codegen/mips64/full-codegen-mips64.cc b/src/full-codegen/mips64/full-codegen-mips64.cc index a61454ed7..c8134ffd9 100644 --- a/src/full-codegen/mips64/full-codegen-mips64.cc +++ b/src/full-codegen/mips64/full-codegen-mips64.cc @@ -4039,14 +4039,14 @@ void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) { VisitForStackValue(args->at(0)); VisitForStackValue(args->at(1)); - // Load original constructor into a4. 
- __ ld(a4, MemOperand(sp, 1 * kPointerSize)); + // Load original constructor into a3. + __ ld(a3, MemOperand(sp, 1 * kPointerSize)); // Check if the calling frame is an arguments adaptor frame. Label adaptor_frame, args_set_up, runtime; __ ld(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); - __ ld(a3, MemOperand(a2, StandardFrameConstants::kContextOffset)); - __ Branch(&adaptor_frame, eq, a3, + __ ld(a4, MemOperand(a2, StandardFrameConstants::kContextOffset)); + __ Branch(&adaptor_frame, eq, a4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); // default constructor has no arguments, so no adaptor frame means no args. __ mov(a0, zero_reg); @@ -4069,8 +4069,8 @@ void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) { // Pre-decrement a2 with kPointerSize on each iteration. // Pre-decrement in order to skip receiver. __ Daddu(a2, a2, Operand(-kPointerSize)); - __ ld(a3, MemOperand(a2)); - __ Push(a3); + __ ld(a4, MemOperand(a2)); + __ Push(a4); __ Daddu(a1, a1, Operand(-1)); __ Branch(&loop, ne, a1, Operand(zero_reg)); } @@ -4079,10 +4079,7 @@ void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) { __ dsll(at, a0, kPointerSizeLog2); __ Daddu(at, at, Operand(sp)); __ ld(a1, MemOperand(at, 0)); - __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); - - CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL); - __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL); + __ Call(isolate()->builtins()->Construct(), RelocInfo::CONSTRUCT_CALL); // Restore context register. 
__ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); diff --git a/src/full-codegen/x64/full-codegen-x64.cc b/src/full-codegen/x64/full-codegen-x64.cc index 8778899c8..6d7a5e1d0 100644 --- a/src/full-codegen/x64/full-codegen-x64.cc +++ b/src/full-codegen/x64/full-codegen-x64.cc @@ -3950,9 +3950,6 @@ void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) { VisitForStackValue(args->at(0)); VisitForStackValue(args->at(1)); - // Load original constructor into rcx. - __ movp(rcx, Operand(rsp, 1 * kPointerSize)); - // Check if the calling frame is an arguments adaptor frame. Label adaptor_frame, args_set_up, runtime; __ movp(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); @@ -3981,11 +3978,9 @@ void FullCodeGenerator::EmitDefaultConstructorCallSuper(CallRuntime* expr) { } __ bind(&args_set_up); - __ movp(rdi, Operand(rsp, rax, times_pointer_size, 0)); - __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex); - - CallConstructStub stub(isolate(), SUPER_CONSTRUCTOR_CALL); - __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL); + __ movp(rdx, Operand(rsp, rax, times_pointer_size, 1 * kPointerSize)); + __ movp(rdi, Operand(rsp, rax, times_pointer_size, 0 * kPointerSize)); + __ Call(isolate()->builtins()->Construct(), RelocInfo::CONSTRUCT_CALL); // Restore context register. __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); diff --git a/src/globals.h b/src/globals.h index f69db94fa..2b6987fda 100644 --- a/src/globals.h +++ b/src/globals.h @@ -601,6 +601,10 @@ enum CallConstructorFlags { NO_CALL_CONSTRUCTOR_FLAGS = 0, // The call target is cached in the instruction stream. RECORD_CONSTRUCTOR_TARGET = 1, + // TODO(bmeurer): Kill these SUPER_* modes and use the Construct builtin + // directly instead; also there's no point in collecting any "targets" for + // super constructor calls, since these are known when we optimize the + // constructor that contains the super call. 
SUPER_CONSTRUCTOR_CALL = 1 << 1, SUPER_CALL_RECORD_TARGET = SUPER_CONSTRUCTOR_CALL | RECORD_CONSTRUCTOR_TARGET }; diff --git a/src/ia32/builtins-ia32.cc b/src/ia32/builtins-ia32.cc index e2055629d..a6d29ccf5 100644 --- a/src/ia32/builtins-ia32.cc +++ b/src/ia32/builtins-ia32.cc @@ -24,13 +24,20 @@ void Builtins::Generate_Adaptor(MacroAssembler* masm, // -- eax : number of arguments excluding receiver // -- edi : called function (only guaranteed when // extra_args requires it) - // -- esi : context // -- esp[0] : return address // -- esp[4] : last argument // -- ... // -- esp[4 * argc] : first argument (argc == eax) // -- esp[4 * (argc +1)] : receiver // ----------------------------------- + __ AssertFunction(edi); + + // Make sure we operate in the context of the called function (for example + // ConstructStubs implemented in C++ will be run in the context of the caller + // instead of the callee, due to the way that [[Construct]] is defined for + // ordinary functions). + // TODO(bmeurer): Can we make this more robust? + __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset)); // Insert extra arguments. int num_extra_args = 0; @@ -488,15 +495,16 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, { FrameScope scope(masm, StackFrame::INTERNAL); + // Setup the context (we need to use the caller context from the isolate). + ExternalReference context_address(Isolate::kContextAddress, + masm->isolate()); + __ mov(esi, Operand::StaticVariable(context_address)); + // Load the previous frame pointer (ebx) to access C arguments __ mov(ebx, Operand(ebp, 0)); - // Get the function from the frame and setup the context. - __ mov(ecx, Operand(ebx, EntryFrameConstants::kFunctionArgOffset)); - __ mov(esi, FieldOperand(ecx, JSFunction::kContextOffset)); - // Push the function and the receiver onto the stack. 
- __ push(ecx); + __ push(Operand(ebx, EntryFrameConstants::kFunctionArgOffset)); __ push(Operand(ebx, EntryFrameConstants::kReceiverArgOffset)); // Load the number of arguments and setup pointer to the arguments. @@ -514,7 +522,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, // Copy arguments to the stack in a loop. Label loop, entry; __ Move(ecx, Immediate(0)); - __ jmp(&entry); + __ jmp(&entry, Label::kNear); __ bind(&loop); __ mov(edx, Operand(ebx, ecx, times_4, 0)); // push parameter from argv __ push(Operand(edx, 0)); // dereference handle @@ -523,19 +531,18 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, __ cmp(ecx, eax); __ j(not_equal, &loop); - // Get the function from the stack and call it. - // kPointerSize for the receiver. - __ mov(edi, Operand(esp, eax, times_4, kPointerSize)); + // Load the previous frame pointer (ebx) to access C arguments + __ mov(ebx, Operand(ebp, 0)); + + // Get the new.target and function from the frame. + __ mov(edx, Operand(ebx, EntryFrameConstants::kNewTargetArgOffset)); + __ mov(edi, Operand(ebx, EntryFrameConstants::kFunctionArgOffset)); // Invoke the code. - if (is_construct) { - // No type feedback cell is available - __ mov(ebx, masm->isolate()->factory()->undefined_value()); - CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS); - __ CallStub(&stub); - } else { - __ Call(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); - } + Handle<Code> builtin = is_construct + ? masm->isolate()->builtins()->Construct() + : masm->isolate()->builtins()->Call(); + __ Call(builtin, RelocInfo::CODE_TARGET); // Exit the internal frame. Notice that this also removes the empty. 
// context and the function left on the stack by the code @@ -1552,6 +1559,68 @@ void Builtins::Generate_Call(MacroAssembler* masm) { } +// static +void Builtins::Generate_ConstructFunction(MacroAssembler* masm) { + // ----------- S t a t e ------------- + // -- eax : the number of arguments (not including the receiver) + // -- edx : the original constructor (checked to be a JSFunction) + // -- edi : the constructor to call (checked to be a JSFunction) + // ----------------------------------- + __ AssertFunction(edx); + __ AssertFunction(edi); + + // Calling convention for function specific ConstructStubs require + // ebx to contain either an AllocationSite or undefined. + __ LoadRoot(ebx, Heap::kUndefinedValueRootIndex); + + // Tail call to the function-specific construct stub (still in the caller + // context at this point). + __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset)); + __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kConstructStubOffset)); + __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize)); + __ jmp(ecx); +} + + +// static +void Builtins::Generate_Construct(MacroAssembler* masm) { + // ----------- S t a t e ------------- + // -- eax : the number of arguments (not including the receiver) + // -- edx : the original constructor (either the same as the constructor or + // the JSFunction on which new was invoked initially) + // -- edi : the constructor to call (can be any Object) + // ----------------------------------- + + Label slow; + __ JumpIfSmi(edi, &slow, Label::kNear); + __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx); + __ j(equal, masm->isolate()->builtins()->ConstructFunction(), + RelocInfo::CODE_TARGET); + __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE); + __ j(not_equal, &slow, Label::kNear); + + // TODO(neis): This doesn't match the ES6 spec for [[Construct]] on proxies. 
+ __ mov(edi, FieldOperand(edi, JSFunctionProxy::kConstructTrapOffset)); + __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); + + __ bind(&slow); + { + // Determine the delegate for the target (if any). + FrameScope scope(masm, StackFrame::INTERNAL); + __ SmiTag(eax); + __ Push(eax); + __ Push(edi); + __ CallRuntime(Runtime::kGetConstructorDelegate, 1); + __ mov(edi, eax); + __ Pop(eax); + __ SmiUntag(eax); + } + // The delegate is always a regular function. + __ AssertFunction(edi); + __ Jump(masm->isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET); +} + + // static void Builtins::Generate_PushArgsAndCall(MacroAssembler* masm) { // ----------- S t a t e ------------- diff --git a/src/ia32/code-stubs-ia32.cc b/src/ia32/code-stubs-ia32.cc index 7da077962..d1efe5083 100644 --- a/src/ia32/code-stubs-ia32.cc +++ b/src/ia32/code-stubs-ia32.cc @@ -2112,17 +2112,17 @@ void CallConstructStub::Generate(MacroAssembler* masm) { // ecx : original constructor (for IsSuperConstructorCall) // edx : slot in feedback vector (Smi, for RecordCallTarget) // edi : constructor function - Label slow, non_function_call; if (IsSuperConstructorCall()) { __ push(ecx); } + Label non_function; // Check that function is not a smi. - __ JumpIfSmi(edi, &non_function_call); + __ JumpIfSmi(edi, &non_function); // Check that function is a JSFunction. __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx); - __ j(not_equal, &slow); + __ j(not_equal, &non_function); if (RecordCallTarget()) { GenerateRecordCallTarget(masm, IsSuperConstructorCall()); @@ -2148,44 +2148,17 @@ void CallConstructStub::Generate(MacroAssembler* masm) { __ mov(edx, edi); } - // Jump to the function-specific construct stub. 
- Register jmp_reg = ecx; - __ mov(jmp_reg, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset)); - __ mov(jmp_reg, FieldOperand(jmp_reg, - SharedFunctionInfo::kConstructStubOffset)); - __ lea(jmp_reg, FieldOperand(jmp_reg, Code::kHeaderSize)); - __ jmp(jmp_reg); - - // edi: called object - // eax: number of arguments - // ecx: object map - // esp[0]: original receiver (for IsSuperConstructorCall) - __ bind(&slow); - { - __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE); - __ j(not_equal, &non_function_call, Label::kNear); - if (IsSuperConstructorCall()) __ Drop(1); - // TODO(neis): This doesn't match the ES6 spec for [[Construct]] on proxies. - __ mov(edi, FieldOperand(edi, JSFunctionProxy::kConstructTrapOffset)); - __ Jump(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); - - __ bind(&non_function_call); - if (IsSuperConstructorCall()) __ Drop(1); - { - // Determine the delegate for the target (if any). - FrameScope scope(masm, StackFrame::INTERNAL); - __ SmiTag(eax); - __ Push(eax); - __ Push(edi); - __ CallRuntime(Runtime::kGetConstructorDelegate, 1); - __ mov(edi, eax); - __ Pop(eax); - __ SmiUntag(eax); - } - // The delegate is always a regular function. - __ AssertFunction(edi); - __ Jump(isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET); - } + // Tail call to the function-specific construct stub (still in the caller + // context at this point). 
+ __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset)); + __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kConstructStubOffset)); + __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize)); + __ jmp(ecx); + + __ bind(&non_function); + if (IsSuperConstructorCall()) __ Drop(1); + __ mov(edx, edi); + __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); } diff --git a/src/ia32/frames-ia32.h b/src/ia32/frames-ia32.h index 1d200c04b..a5ce6a5f0 100644 --- a/src/ia32/frames-ia32.h +++ b/src/ia32/frames-ia32.h @@ -39,6 +39,7 @@ class EntryFrameConstants : public AllStatic { public: static const int kCallerFPOffset = -6 * kPointerSize; + static const int kNewTargetArgOffset = +2 * kPointerSize; static const int kFunctionArgOffset = +3 * kPointerSize; static const int kReceiverArgOffset = +4 * kPointerSize; static const int kArgcOffset = +5 * kPointerSize; diff --git a/src/mips/builtins-mips.cc b/src/mips/builtins-mips.cc index 8c01d9741..912b86003 100644 --- a/src/mips/builtins-mips.cc +++ b/src/mips/builtins-mips.cc @@ -25,12 +25,19 @@ void Builtins::Generate_Adaptor(MacroAssembler* masm, // -- a0 : number of arguments excluding receiver // -- a1 : called function (only guaranteed when // -- extra_args requires it) - // -- cp : context // -- sp[0] : last argument // -- ... // -- sp[4 * (argc - 1)] : first argument // -- sp[4 * agrc] : receiver // ----------------------------------- + __ AssertFunction(a1); + + // Make sure we operate in the context of the called function (for example + // ConstructStubs implemented in C++ will be run in the context of the caller + // instead of the callee, due to the way that [[Construct]] is defined for + // ordinary functions). + // TODO(bmeurer): Can we make this more robust? + __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); // Insert extra arguments. 
int num_extra_args = 0; @@ -731,7 +738,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, // Called from JSEntryStub::GenerateBody // ----------- S t a t e ------------- - // -- a0: code entry + // -- a0: new.target // -- a1: function // -- a2: receiver_pointer // -- a3: argc @@ -746,8 +753,11 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, { FrameScope scope(masm, StackFrame::INTERNAL); - // Set up the context from the function argument. - __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); + // Setup the context (we need to use the caller context from the isolate). + ExternalReference context_address(Isolate::kContextAddress, + masm->isolate()); + __ li(cp, Operand(context_address)); + __ lw(cp, MemOperand(cp)); // Push the function and the receiver onto the stack. __ Push(a1, a2); @@ -760,6 +770,9 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, // Clobbers a2. Generate_CheckStackOverflow(masm, kFunctionOffset, a3, kArgcIsUntaggedInt); + // Remember new.target. + __ mov(t1, a0); + // Copy arguments to the stack in a loop. // a3: argc // s0: argv, i.e. points to first arg @@ -777,6 +790,10 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, __ bind(&entry); __ Branch(&loop, ne, s0, Operand(t2)); + // Setup new.target and argc. + __ mov(a0, a3); + __ mov(a3, t1); + // Initialize all JavaScript callee-saved registers, since they will be seen // by the garbage collector as part of handlers. __ LoadRoot(t0, Heap::kUndefinedValueRootIndex); @@ -788,16 +805,11 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, // s6 holds the root address. Do not clobber. // s7 is cp. Do not init. - // Invoke the code and pass argc as a0. 
- __ mov(a0, a3); - if (is_construct) { - // No type feedback cell is available - __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); - CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS); - __ CallStub(&stub); - } else { - __ Call(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); - } + // Invoke the code. + Handle<Code> builtin = is_construct + ? masm->isolate()->builtins()->Construct() + : masm->isolate()->builtins()->Call(); + __ Call(builtin, RelocInfo::CODE_TARGET); // Leave internal frame. } @@ -1628,6 +1640,66 @@ void Builtins::Generate_Call(MacroAssembler* masm) { } +// static +void Builtins::Generate_ConstructFunction(MacroAssembler* masm) { + // ----------- S t a t e ------------- + // -- a0 : the number of arguments (not including the receiver) + // -- a1 : the constructor to call (checked to be a JSFunction) + // -- a3 : the original constructor (checked to be a JSFunction) + // ----------------------------------- + __ AssertFunction(a1); + __ AssertFunction(a3); + + // Calling convention for function specific ConstructStubs require + // a2 to contain either an AllocationSite or undefined. + __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); + + // Tail call to the function-specific construct stub (still in the caller + // context at this point). 
+ __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); + __ lw(t0, FieldMemOperand(t0, SharedFunctionInfo::kConstructStubOffset)); + __ Addu(at, t0, Operand(Code::kHeaderSize - kHeapObjectTag)); + __ Jump(at); +} + + +// static +void Builtins::Generate_Construct(MacroAssembler* masm) { + // ----------- S t a t e ------------- + // -- a0 : the number of arguments (not including the receiver) + // -- a1 : the constructor to call (can be any Object) + // -- a3 : the original constructor (either the same as the constructor or + // the JSFunction on which new was invoked initially) + // ----------------------------------- + + Label slow; + __ JumpIfSmi(a1, &slow); + __ GetObjectType(a1, t1, t1); + __ Jump(masm->isolate()->builtins()->ConstructFunction(), + RelocInfo::CODE_TARGET, eq, t1, Operand(JS_FUNCTION_TYPE)); + __ Branch(&slow, ne, t1, Operand(JS_FUNCTION_PROXY_TYPE)); + + // TODO(neis): This doesn't match the ES6 spec for [[Construct]] on proxies. + __ lw(a1, FieldMemOperand(a1, JSFunctionProxy::kConstructTrapOffset)); + __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); + + __ bind(&slow); + { + // Determine the delegate for the target (if any). + FrameScope scope(masm, StackFrame::INTERNAL); + __ SmiTag(a0); + __ Push(a0, a1); + __ CallRuntime(Runtime::kGetConstructorDelegate, 1); + __ mov(a1, v0); + __ Pop(a0); + __ SmiUntag(a0); + } + // The delegate is always a regular function. 
+ __ AssertFunction(a1); + __ Jump(masm->isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET); +} + + // static void Builtins::Generate_PushArgsAndCall(MacroAssembler* masm) { // ----------- S t a t e ------------- diff --git a/src/mips/code-stubs-mips.cc b/src/mips/code-stubs-mips.cc index bdb382369..40d77dc19 100644 --- a/src/mips/code-stubs-mips.cc +++ b/src/mips/code-stubs-mips.cc @@ -2599,13 +2599,13 @@ void CallConstructStub::Generate(MacroAssembler* masm) { // a2 : feedback vector // a3 : slot in feedback vector (Smi, for RecordCallTarget) // t0 : original constructor (for IsSuperConstructorCall) - Label slow, non_function_call; + Label non_function; // Check that the function is not a smi. - __ JumpIfSmi(a1, &non_function_call); + __ JumpIfSmi(a1, &non_function); // Check that the function is a JSFunction. __ GetObjectType(a1, t1, t1); - __ Branch(&slow, ne, t1, Operand(JS_FUNCTION_TYPE)); + __ Branch(&non_function, ne, t1, Operand(JS_FUNCTION_TYPE)); if (RecordCallTarget()) { GenerateRecordCallTarget(masm, IsSuperConstructorCall()); @@ -2631,40 +2631,16 @@ void CallConstructStub::Generate(MacroAssembler* masm) { __ mov(a3, a1); } - // Jump to the function-specific construct stub. - Register jmp_reg = t0; - __ lw(jmp_reg, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); - __ lw(jmp_reg, FieldMemOperand(jmp_reg, - SharedFunctionInfo::kConstructStubOffset)); - __ Addu(at, jmp_reg, Operand(Code::kHeaderSize - kHeapObjectTag)); + // Tail call to the function-specific construct stub (still in the caller + // context at this point). 
+ __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); + __ lw(t0, FieldMemOperand(t0, SharedFunctionInfo::kConstructStubOffset)); + __ Addu(at, t0, Operand(Code::kHeaderSize - kHeapObjectTag)); __ Jump(at); - // a0: number of arguments - // a1: called object - // t1: object type - __ bind(&slow); - { - __ Branch(&non_function_call, ne, t1, Operand(JS_FUNCTION_PROXY_TYPE)); - // TODO(neis): This doesn't match the ES6 spec for [[Construct]] on proxies. - __ lw(a1, FieldMemOperand(a1, JSFunctionProxy::kConstructTrapOffset)); - __ Jump(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); - - __ bind(&non_function_call); - { - // Determine the delegate for the target (if any). - FrameScope scope(masm, StackFrame::INTERNAL); - __ SmiTag(a0); - __ Push(a0, a1); - __ CallRuntime(Runtime::kGetConstructorDelegate, 1); - __ mov(a1, v0); - __ Pop(a0); - __ SmiUntag(a0); - } - // The delegate is always a regular function. - __ AssertFunction(a1); - __ Jump(masm->isolate()->builtins()->CallFunction(), - RelocInfo::CODE_TARGET); - } + __ bind(&non_function); + __ mov(a3, a1); + __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); } diff --git a/src/mips64/builtins-mips64.cc b/src/mips64/builtins-mips64.cc index fc937f64d..d23a0f164 100644 --- a/src/mips64/builtins-mips64.cc +++ b/src/mips64/builtins-mips64.cc @@ -24,12 +24,19 @@ void Builtins::Generate_Adaptor(MacroAssembler* masm, // -- a0 : number of arguments excluding receiver // -- a1 : called function (only guaranteed when // -- extra_args requires it) - // -- cp : context // -- sp[0] : last argument // -- ... 
// -- sp[8 * (argc - 1)] : first argument // -- sp[8 * agrc] : receiver // ----------------------------------- + __ AssertFunction(a1); + + // Make sure we operate in the context of the called function (for example + // ConstructStubs implemented in C++ will be run in the context of the caller + // instead of the callee, due to the way that [[Construct]] is defined for + // ordinary functions). + // TODO(bmeurer): Can we make this more robust? + __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); // Insert extra arguments. int num_extra_args = 0; @@ -730,7 +737,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, // Called from JSEntryStub::GenerateBody // ----------- S t a t e ------------- - // -- a0: code entry + // -- a0: new.target // -- a1: function // -- a2: receiver_pointer // -- a3: argc @@ -744,8 +751,11 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, { FrameScope scope(masm, StackFrame::INTERNAL); - // Set up the context from the function argument. - __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); + // Setup the context (we need to use the caller context from the isolate). + ExternalReference context_address(Isolate::kContextAddress, + masm->isolate()); + __ li(cp, Operand(context_address)); + __ ld(cp, MemOperand(cp)); // Push the function and the receiver onto the stack. __ Push(a1, a2); @@ -758,6 +768,9 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, // Clobbers a2. Generate_CheckStackOverflow(masm, kFunctionOffset, a3, kArgcIsUntaggedInt); + // Remember new.target. + __ mov(a5, a0); + // Copy arguments to the stack in a loop. // a3: argc // s0: argv, i.e. points to first arg @@ -775,6 +788,10 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, __ bind(&entry); __ Branch(&loop, ne, s0, Operand(a6)); + // Setup new.target and argc. 
+ __ mov(a0, a3); + __ mov(a3, a5); + // Initialize all JavaScript callee-saved registers, since they will be seen // by the garbage collector as part of handlers. __ LoadRoot(a4, Heap::kUndefinedValueRootIndex); @@ -786,16 +803,11 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, // s6 holds the root address. Do not clobber. // s7 is cp. Do not init. - // Invoke the code and pass argc as a0. - __ mov(a0, a3); - if (is_construct) { - // No type feedback cell is available - __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); - CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS); - __ CallStub(&stub); - } else { - __ Call(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); - } + // Invoke the code. + Handle<Code> builtin = is_construct + ? masm->isolate()->builtins()->Construct() + : masm->isolate()->builtins()->Call(); + __ Call(builtin, RelocInfo::CODE_TARGET); // Leave internal frame. } @@ -1624,6 +1636,65 @@ void Builtins::Generate_Call(MacroAssembler* masm) { } +void Builtins::Generate_ConstructFunction(MacroAssembler* masm) { + // ----------- S t a t e ------------- + // -- a0 : the number of arguments (not including the receiver) + // -- a1 : the constructor to call (checked to be a JSFunction) + // -- a3 : the original constructor (checked to be a JSFunction) + // ----------------------------------- + __ AssertFunction(a1); + __ AssertFunction(a3); + + // Calling convention for function specific ConstructStubs require + // a2 to contain either an AllocationSite or undefined. + __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); + + // Tail call to the function-specific construct stub (still in the caller + // context at this point). 
+ __ ld(a4, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); + __ ld(a4, FieldMemOperand(a4, SharedFunctionInfo::kConstructStubOffset)); + __ Daddu(at, a4, Operand(Code::kHeaderSize - kHeapObjectTag)); + __ Jump(at); +} + + +// static +void Builtins::Generate_Construct(MacroAssembler* masm) { + // ----------- S t a t e ------------- + // -- a0 : the number of arguments (not including the receiver) + // -- a1 : the constructor to call (can be any Object) + // -- a3 : the original constructor (either the same as the constructor or + // the JSFunction on which new was invoked initially) + // ----------------------------------- + + Label slow; + __ JumpIfSmi(a1, &slow); + __ GetObjectType(a1, a5, a5); + __ Jump(masm->isolate()->builtins()->ConstructFunction(), + RelocInfo::CODE_TARGET, eq, a5, Operand(JS_FUNCTION_TYPE)); + __ Branch(&slow, ne, a5, Operand(JS_FUNCTION_PROXY_TYPE)); + + // TODO(neis): This doesn't match the ES6 spec for [[Construct]] on proxies. + __ ld(a1, FieldMemOperand(a1, JSFunctionProxy::kConstructTrapOffset)); + __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); + + __ bind(&slow); + { + // Determine the delegate for the target (if any). + FrameScope scope(masm, StackFrame::INTERNAL); + __ SmiTag(a0); + __ Push(a0, a1); + __ CallRuntime(Runtime::kGetConstructorDelegate, 1); + __ mov(a1, v0); + __ Pop(a0); + __ SmiUntag(a0); + } + // The delegate is always a regular function. 
+ __ AssertFunction(a1); + __ Jump(masm->isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET); +} + + // static void Builtins::Generate_PushArgsAndCall(MacroAssembler* masm) { // ----------- S t a t e ------------- diff --git a/src/mips64/code-stubs-mips64.cc b/src/mips64/code-stubs-mips64.cc index f139636ca..87a077f81 100644 --- a/src/mips64/code-stubs-mips64.cc +++ b/src/mips64/code-stubs-mips64.cc @@ -2637,12 +2637,13 @@ void CallConstructStub::Generate(MacroAssembler* masm) { // a2 : feedback vector // a3 : slot in feedback vector (Smi, for RecordCallTarget) // a4 : original constructor (for IsSuperConstructorCall) - Label slow, non_function_call; + + Label non_function; // Check that the function is not a smi. - __ JumpIfSmi(a1, &non_function_call); + __ JumpIfSmi(a1, &non_function); // Check that the function is a JSFunction. __ GetObjectType(a1, a5, a5); - __ Branch(&slow, ne, a5, Operand(JS_FUNCTION_TYPE)); + __ Branch(&non_function, ne, a5, Operand(JS_FUNCTION_TYPE)); if (RecordCallTarget()) { GenerateRecordCallTarget(masm, IsSuperConstructorCall()); @@ -2668,40 +2669,16 @@ void CallConstructStub::Generate(MacroAssembler* masm) { __ mov(a3, a1); } - // Jump to the function-specific construct stub. - Register jmp_reg = a4; - __ ld(jmp_reg, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); - __ ld(jmp_reg, FieldMemOperand(jmp_reg, - SharedFunctionInfo::kConstructStubOffset)); - __ Daddu(at, jmp_reg, Operand(Code::kHeaderSize - kHeapObjectTag)); + // Tail call to the function-specific construct stub (still in the caller + // context at this point). 
+ __ ld(a4, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); + __ ld(a4, FieldMemOperand(a4, SharedFunctionInfo::kConstructStubOffset)); + __ Daddu(at, a4, Operand(Code::kHeaderSize - kHeapObjectTag)); __ Jump(at); - // a0: number of arguments - // a1: called object - // a5: object type - __ bind(&slow); - { - __ Branch(&non_function_call, ne, a5, Operand(JS_FUNCTION_PROXY_TYPE)); - // TODO(neis): This doesn't match the ES6 spec for [[Construct]] on proxies. - __ ld(a1, FieldMemOperand(a1, JSFunctionProxy::kConstructTrapOffset)); - __ Jump(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); - - __ bind(&non_function_call); - { - // Determine the delegate for the target (if any). - FrameScope scope(masm, StackFrame::INTERNAL); - __ SmiTag(a0); - __ Push(a0, a1); - __ CallRuntime(Runtime::kGetConstructorDelegate, 1); - __ mov(a1, v0); - __ Pop(a0); - __ SmiUntag(a0); - } - // The delegate is always a regular function. - __ AssertFunction(a1); - __ Jump(masm->isolate()->builtins()->CallFunction(), - RelocInfo::CODE_TARGET); - } + __ bind(&non_function); + __ mov(a3, a1); + __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); } diff --git a/src/runtime/runtime-classes.cc b/src/runtime/runtime-classes.cc index 5f0981486..6c4f47091 100644 --- a/src/runtime/runtime-classes.cc +++ b/src/runtime/runtime-classes.cc @@ -525,25 +525,13 @@ RUNTIME_FUNCTION(Runtime_DefaultConstructorCallSuper) { base::SmartArrayPointer<Handle<Object>> arguments = Runtime::GetCallerArguments(isolate, 0, &argument_count); - // Prepare the array containing all passed arguments. - Handle<FixedArray> elements = - isolate->factory()->NewUninitializedFixedArray(argument_count); - for (int i = 0; i < argument_count; ++i) { - elements->set(i, *arguments[i]); - } - Handle<JSArray> array = isolate->factory()->NewJSArrayWithElements( - elements, FAST_ELEMENTS, argument_count); - - // Call %reflect_construct(<super_constructor>, <arguments>, <original_constructor>) now. 
- Handle<JSFunction> reflect = isolate->reflect_construct(); - Handle<Object> argv[] = {super_constructor, array, original_constructor}; Handle<Object> result; ASSIGN_RETURN_FAILURE_ON_EXCEPTION( - isolate, result, - Execution::Call(isolate, reflect, isolate->factory()->undefined_value(), - arraysize(argv), argv)); + isolate, result, Execution::New(super_constructor, original_constructor, + argument_count, arguments.get())); return *result; } + } // namespace internal } // namespace v8 diff --git a/src/x64/builtins-x64.cc b/src/x64/builtins-x64.cc index 91db4c61f..e8cc4381b 100644 --- a/src/x64/builtins-x64.cc +++ b/src/x64/builtins-x64.cc @@ -23,13 +23,20 @@ void Builtins::Generate_Adaptor(MacroAssembler* masm, // -- rax : number of arguments excluding receiver // -- rdi : called function (only guaranteed when // extra_args requires it) - // -- rsi : context // -- rsp[0] : return address // -- rsp[8] : last argument // -- ... // -- rsp[8 * argc] : first argument (argc == rax) // -- rsp[8 * (argc + 1)] : receiver // ----------------------------------- + __ AssertFunction(rdi); + + // Make sure we operate in the context of the called function (for example + // ConstructStubs implemented in C++ will be run in the context of the caller + // instead of the callee, due to the way that [[Construct]] is defined for + // ordinary functions). + // TODO(bmeurer): Can we make this more robust? + __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); // Insert extra arguments. int num_extra_args = 0; @@ -441,7 +448,7 @@ void Builtins::Generate_JSConstructStubForDerived(MacroAssembler* masm) { enum IsTagged { kRaxIsSmiTagged, kRaxIsUntaggedInt }; -// Clobbers rcx, rdx, kScratchRegister; preserves all other registers. +// Clobbers rcx, r11, kScratchRegister; preserves all other registers. 
static void Generate_CheckStackOverflow(MacroAssembler* masm, const int calleeOffset, IsTagged rax_is_tagged) { @@ -456,17 +463,17 @@ static void Generate_CheckStackOverflow(MacroAssembler* masm, // Make rcx the space we have left. The stack might already be overflowed // here which will cause rcx to become negative. __ subp(rcx, kScratchRegister); - // Make rdx the space we need for the array when it is unrolled onto the + // Make r11 the space we need for the array when it is unrolled onto the // stack. if (rax_is_tagged == kRaxIsSmiTagged) { - __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2); + __ PositiveSmiTimesPowerOfTwoToInteger64(r11, rax, kPointerSizeLog2); } else { DCHECK(rax_is_tagged == kRaxIsUntaggedInt); - __ movp(rdx, rax); - __ shlq(rdx, Immediate(kPointerSizeLog2)); + __ movp(r11, rax); + __ shlq(r11, Immediate(kPointerSizeLog2)); } // Check if the arguments will overflow the stack. - __ cmpp(rcx, rdx); + __ cmpp(rcx, r11); __ j(greater, &okay); // Signed comparison. // Out of stack space. @@ -486,8 +493,8 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, ProfileEntryHookStub::MaybeCallEntryHook(masm); // Expects five C++ function parameters. - // - Address entry (ignored) - // - JSFunction* function ( + // - Object* new_target + // - JSFunction* function // - Object* receiver // - int argc // - Object*** argv @@ -498,11 +505,12 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, // Platform specific argument handling. After this, the stack contains // an internal frame and the pushed function and receiver, and // register rax and rbx holds the argument count and argument array, - // while rdi holds the function pointer and rsi the context. + // while rdi holds the function pointer, rsi the context, and rdx the + // new.target. 
#ifdef _WIN64 // MSVC parameters in: - // rcx : entry (ignored) + // rcx : new_target // rdx : function // r8 : receiver // r9 : argc @@ -510,11 +518,14 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, // Clear the context before we push it when entering the internal frame. __ Set(rsi, 0); + // Enter an internal frame. FrameScope scope(masm, StackFrame::INTERNAL); - // Load the function context into rsi. - __ movp(rsi, FieldOperand(rdx, JSFunction::kContextOffset)); + // Setup the context (we need to use the caller context from the isolate). + ExternalReference context_address(Isolate::kContextAddress, + masm->isolate()); + __ movp(rsi, masm->ExternalOperand(context_address)); // Push the function and the receiver onto the stack. __ Push(rdx); @@ -527,30 +538,42 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, __ movp(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset)); // Load the function pointer into rdi. __ movp(rdi, rdx); + // Load the new.target into rdx. + __ movp(rdx, rcx); #else // _WIN64 // GCC parameters in: - // rdi : entry (ignored) + // rdi : new_target // rsi : function // rdx : receiver // rcx : argc // r8 : argv + __ movp(r11, rdi); __ movp(rdi, rsi); // rdi : function + // r11 : new_target // Clear the context before we push it when entering the internal frame. __ Set(rsi, 0); + // Enter an internal frame. FrameScope scope(masm, StackFrame::INTERNAL); - // Push the function and receiver and setup the context. + // Setup the context (we need to use the caller context from the isolate). + ExternalReference context_address(Isolate::kContextAddress, + masm->isolate()); + __ movp(rsi, masm->ExternalOperand(context_address)); + + // Push the function and receiver onto the stack. __ Push(rdi); __ Push(rdx); - __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); // Load the number of arguments and setup pointer to the arguments. 
__ movp(rax, rcx); __ movp(rbx, r8); + + // Load the new.target into rdx. + __ movp(rdx, r11); #endif // _WIN64 // Current stack contents: @@ -562,13 +585,14 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, // rbx : argv // rsi : context // rdi : function + // rdx : new.target // Check if we have enough stack space to push all arguments. // The function is the first thing that was pushed above after entering // the internal frame. const int kFunctionOffset = InternalFrameConstants::kCodeOffset - kRegisterSize; - // Expects argument count in rax. Clobbers rcx, rdx. + // Expects argument count in rax. Clobbers rcx, r11. Generate_CheckStackOverflow(masm, kFunctionOffset, kRaxIsUntaggedInt); // Copy arguments to the stack in a loop. @@ -576,7 +600,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, // Push the values of these handles. Label loop, entry; __ Set(rcx, 0); // Set loop variable to 0. - __ jmp(&entry); + __ jmp(&entry, Label::kNear); __ bind(&loop); __ movp(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0)); __ Push(Operand(kScratchRegister, 0)); // dereference handle @@ -585,16 +609,12 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, __ cmpp(rcx, rax); __ j(not_equal, &loop); - // Invoke the code. - if (is_construct) { - // No type feedback cell is available - __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex); - // Expects rdi to hold function pointer. - CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS); - __ CallStub(&stub); - } else { - __ Call(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); - } + // Invoke the builtin code. + Handle<Code> builtin = is_construct + ? masm->isolate()->builtins()->Construct() + : masm->isolate()->builtins()->Call(); + __ Call(builtin, RelocInfo::CODE_TARGET); + // Exit the internal frame. Notice that this also removes the empty // context and the function left on the stack by the code // invocation. 
@@ -1695,7 +1715,7 @@ void Builtins::Generate_CallFunction(MacroAssembler* masm) { void Builtins::Generate_Call(MacroAssembler* masm) { // ----------- S t a t e ------------- // -- rax : the number of arguments (not including the receiver) - // -- rdi : the target to call (can be any Object). + // -- rdi : the target to call (can be any Object) // ----------------------------------- Label non_smi, non_function; @@ -1742,6 +1762,68 @@ void Builtins::Generate_Call(MacroAssembler* masm) { } +// static +void Builtins::Generate_ConstructFunction(MacroAssembler* masm) { + // ----------- S t a t e ------------- + // -- rax : the number of arguments (not including the receiver) + // -- rdx : the original constructor (checked to be a JSFunction) + // -- rdi : the constructor to call (checked to be a JSFunction) + // ----------------------------------- + __ AssertFunction(rdx); + __ AssertFunction(rdi); + + // Calling convention for function specific ConstructStubs require + // rbx to contain either an AllocationSite or undefined. + __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex); + + // Tail call to the function-specific construct stub (still in the caller + // context at this point). 
+ __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); + __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kConstructStubOffset)); + __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize)); + __ jmp(rcx); +} + + +// static +void Builtins::Generate_Construct(MacroAssembler* masm) { + // ----------- S t a t e ------------- + // -- rax : the number of arguments (not including the receiver) + // -- rdx : the original constructor (either the same as the constructor or + // the JSFunction on which new was invoked initially) + // -- rdi : the constructor to call (can be any Object) + // ----------------------------------- + + Label slow; + __ JumpIfSmi(rdi, &slow, Label::kNear); + __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); + __ j(equal, masm->isolate()->builtins()->ConstructFunction(), + RelocInfo::CODE_TARGET); + __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE); + __ j(not_equal, &slow, Label::kNear); + + // TODO(neis): This doesn't match the ES6 spec for [[Construct]] on proxies. + __ movp(rdi, FieldOperand(rdi, JSFunctionProxy::kConstructTrapOffset)); + __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); + + __ bind(&slow); + { + // Determine the delegate for the target (if any). + FrameScope scope(masm, StackFrame::INTERNAL); + __ Integer32ToSmi(rax, rax); + __ Push(rax); + __ Push(rdi); + __ CallRuntime(Runtime::kGetConstructorDelegate, 1); + __ movp(rdi, rax); + __ Pop(rax); + __ SmiToInteger32(rax, rax); + } + // The delegate is always a regular function. 
+ __ AssertFunction(rdi); + __ Jump(masm->isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET); +} + + // static void Builtins::Generate_PushArgsAndCall(MacroAssembler* masm) { // ----------- S t a t e ------------- diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc index cda5d4f9f..59d3d6a58 100644 --- a/src/x64/code-stubs-x64.cc +++ b/src/x64/code-stubs-x64.cc @@ -1977,13 +1977,13 @@ void CallConstructStub::Generate(MacroAssembler* masm) { // rcx : original constructor (for IsSuperConstructorCall) // rdx : slot in feedback vector (Smi, for RecordCallTarget) // rdi : constructor function - Label slow, non_function_call; - // Check that function is not a smi. - __ JumpIfSmi(rdi, &non_function_call); - // Check that function is a JSFunction. + Label non_function; + // Check that the constructor is not a smi. + __ JumpIfSmi(rdi, &non_function); + // Check that constructor is a JSFunction. __ CmpObjectType(rdi, JS_FUNCTION_TYPE, r11); - __ j(not_equal, &slow); + __ j(not_equal, &non_function); if (RecordCallTarget()) { GenerateRecordCallTarget(masm, IsSuperConstructorCall()); @@ -2008,42 +2008,16 @@ void CallConstructStub::Generate(MacroAssembler* masm) { __ movp(rdx, rdi); } - // Jump to the function-specific construct stub. - Register jmp_reg = rcx; - __ movp(jmp_reg, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); - __ movp(jmp_reg, FieldOperand(jmp_reg, - SharedFunctionInfo::kConstructStubOffset)); - __ leap(jmp_reg, FieldOperand(jmp_reg, Code::kHeaderSize)); - __ jmp(jmp_reg); - - // rdi: called object - // rax: number of arguments - // r11: object map - __ bind(&slow); - { - __ CmpInstanceType(r11, JS_FUNCTION_PROXY_TYPE); - __ j(not_equal, &non_function_call, Label::kNear); - - // TODO(neis): This doesn't match the ES6 spec for [[Construct]] on proxies. 
- __ movp(rdi, FieldOperand(rdi, JSFunctionProxy::kConstructTrapOffset)); - __ Jump(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); + // Tail call to the function-specific construct stub (still in the caller + // context at this point). + __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); + __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kConstructStubOffset)); + __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize)); + __ jmp(rcx); - __ bind(&non_function_call); - { - // Determine the delegate for the target (if any). - FrameScope scope(masm, StackFrame::INTERNAL); - __ Integer32ToSmi(rax, rax); - __ Push(rax); - __ Push(rdi); - __ CallRuntime(Runtime::kGetConstructorDelegate, 1); - __ movp(rdi, rax); - __ Pop(rax); - __ SmiToInteger32(rax, rax); - } - // The delegate is always a regular function. - __ AssertFunction(rdi); - __ Jump(isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET); - } + __ bind(&non_function); + __ movp(rdx, rdi); + __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); }