From: bmeurer Date: Tue, 8 Sep 2015 06:12:17 +0000 (-0700) Subject: Revert of [builtins] Unify the various versions of [[Call]] with a Call builtin.... X-Git-Tag: upstream/4.7.83~412 X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=298d4a6b7636f9c8aace971e8fe35500fbd40392;p=platform%2Fupstream%2Fv8.git Revert of [builtins] Unify the various versions of [[Call]] with a Call builtin. (patchset #10 id:260001 of https://codereview.chromium.org/1311013008/ ) Reason for revert: Breaks nosnap, needs investigation Original issue's description: > [builtins] Unify the various versions of [[Call]] with a Call builtin. > > The new Call and CallFunction builtins supersede the current > CallFunctionStub (and CallIC magic) and will be the single bottleneck > for all calling, including the currently special Function.prototype.call > and Function.prototype.apply builtins, which had handwritten (and > not fully compliant) versions of CallFunctionStub, and also the > CallIC(s), which where also slightly different. > > This also reduces the overhead for API function calls, which is still > unnecessary high, but let's do that step-by-step. > > This also fixes a bunch of cases where the implicit ToObject for > sloppy receivers was done in the wrong context (in the caller > context instead of the callee context), which basically meant > that we allowed cross context access to %ObjectPrototype%. > > MIPS and MIPS64 ports contributed by akos.palfi@imgtec.com. 
> > R=mstarzinger@chromium.org, jarin@chromium.org, mvstanton@chromium.org > CQ_INCLUDE_TRYBOTS=tryserver.v8:v8_linux_layout_dbg > BUG=v8:4413 > LOG=n > > Committed: https://crrev.com/ef268a83be4dead004047c25b702319ea4be7277 > Cr-Commit-Position: refs/heads/master@{#30627} TBR=rmcilroy@chromium.org,jarin@chromium.org,mstarzinger@chromium.org,mvstanton@chromium.org NOPRESUBMIT=true NOTREECHECKS=true NOTRY=true BUG=v8:4413 Review URL: https://codereview.chromium.org/1328963004 Cr-Commit-Position: refs/heads/master@{#30628} --- diff --git a/src/arm/builtins-arm.cc b/src/arm/builtins-arm.cc index f63e05f2c..ab2d77e28 100644 --- a/src/arm/builtins-arm.cc +++ b/src/arm/builtins-arm.cc @@ -1256,30 +1256,127 @@ void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) { } -// static void Builtins::Generate_FunctionCall(MacroAssembler* masm) { // 1. Make sure we have at least one argument. // r0: actual number of arguments - { - Label done; + { Label done; __ cmp(r0, Operand::Zero()); __ b(ne, &done); - __ PushRoot(Heap::kUndefinedValueRootIndex); + __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); + __ push(r2); __ add(r0, r0, Operand(1)); __ bind(&done); } - // 2. Get the callable to call (passed as receiver) from the stack. + // 2. Get the function to call (passed as receiver) from the stack, check + // if it is a function. // r0: actual number of arguments + Label slow, non_function; __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2)); + __ JumpIfSmi(r1, &non_function); + __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE); + __ b(ne, &slow); + + // 3a. Patch the first argument if necessary when calling a function. + // r0: actual number of arguments + // r1: function + Label shift_arguments; + __ mov(r4, Operand::Zero()); // indicate regular JS_FUNCTION + { Label convert_to_object, use_global_proxy, patch_receiver; + // Change context eagerly in case we need the global receiver. 
+ __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset)); + + // Do not transform the receiver for strict mode functions. + __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); + __ ldr(r3, FieldMemOperand(r2, SharedFunctionInfo::kCompilerHintsOffset)); + __ tst(r3, Operand(1 << (SharedFunctionInfo::kStrictModeFunction + + kSmiTagSize))); + __ b(ne, &shift_arguments); + + // Do not transform the receiver for native (Compilerhints already in r3). + __ tst(r3, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize))); + __ b(ne, &shift_arguments); + + // Compute the receiver in sloppy mode. + __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2)); + __ ldr(r2, MemOperand(r2, -kPointerSize)); + // r0: actual number of arguments + // r1: function + // r2: first argument + __ JumpIfSmi(r2, &convert_to_object); + + __ LoadRoot(r3, Heap::kUndefinedValueRootIndex); + __ cmp(r2, r3); + __ b(eq, &use_global_proxy); + __ LoadRoot(r3, Heap::kNullValueRootIndex); + __ cmp(r2, r3); + __ b(eq, &use_global_proxy); + + STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); + __ CompareObjectType(r2, r3, r3, FIRST_SPEC_OBJECT_TYPE); + __ b(ge, &shift_arguments); + + __ bind(&convert_to_object); + + { + // Enter an internal frame in order to preserve argument count. + FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); + __ SmiTag(r0); + __ push(r0); + + __ mov(r0, r2); + ToObjectStub stub(masm->isolate()); + __ CallStub(&stub); + __ mov(r2, r0); + + __ pop(r0); + __ SmiUntag(r0); + + // Exit the internal frame. + } + + // Restore the function to r1, and the flag to r4. + __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2)); + __ mov(r4, Operand::Zero()); + __ jmp(&patch_receiver); - // 3. 
Shift arguments and return address one slot down on the stack + __ bind(&use_global_proxy); + __ ldr(r2, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); + __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalProxyOffset)); + + __ bind(&patch_receiver); + __ add(r3, sp, Operand(r0, LSL, kPointerSizeLog2)); + __ str(r2, MemOperand(r3, -kPointerSize)); + + __ jmp(&shift_arguments); + } + + // 3b. Check for function proxy. + __ bind(&slow); + __ mov(r4, Operand(1, RelocInfo::NONE32)); // indicate function proxy + __ cmp(r2, Operand(JS_FUNCTION_PROXY_TYPE)); + __ b(eq, &shift_arguments); + __ bind(&non_function); + __ mov(r4, Operand(2, RelocInfo::NONE32)); // indicate non-function + + // 3c. Patch the first argument when calling a non-function. The + // CALL_NON_FUNCTION builtin expects the non-function callee as + // receiver, so overwrite the first argument which will ultimately + // become the receiver. + // r0: actual number of arguments + // r1: function + // r4: call type (0: JS function, 1: function proxy, 2: non-function) + __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2)); + __ str(r1, MemOperand(r2, -kPointerSize)); + + // 4. Shift arguments and return address one slot down on the stack // (overwriting the original receiver). Adjust argument count to make // the original first argument the new receiver. // r0: actual number of arguments - // r1: callable - { - Label loop; + // r1: function + // r4: call type (0: JS function, 1: function proxy, 2: non-function) + __ bind(&shift_arguments); + { Label loop; // Calculate the copy start address (destination). Copy end address is sp. __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2)); @@ -1295,8 +1392,49 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) { __ pop(); } - // 4. Call the callable. - __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); + // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin, + // or a function proxy via CALL_FUNCTION_PROXY. 
+ // r0: actual number of arguments + // r1: function + // r4: call type (0: JS function, 1: function proxy, 2: non-function) + { Label function, non_proxy; + __ tst(r4, r4); + __ b(eq, &function); + // Expected number of arguments is 0 for CALL_NON_FUNCTION. + __ mov(r2, Operand::Zero()); + __ cmp(r4, Operand(1)); + __ b(ne, &non_proxy); + + __ push(r1); // re-add proxy object as additional argument + __ add(r0, r0, Operand(1)); + __ GetBuiltinFunction(r1, Context::CALL_FUNCTION_PROXY_BUILTIN_INDEX); + __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), + RelocInfo::CODE_TARGET); + + __ bind(&non_proxy); + __ GetBuiltinFunction(r1, Context::CALL_NON_FUNCTION_BUILTIN_INDEX); + __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), + RelocInfo::CODE_TARGET); + __ bind(&function); + } + + // 5b. Get the code to call from the function and check that the number of + // expected arguments matches what we're providing. If so, jump + // (tail-call) to the code in register edx without checking arguments. + // r0: actual number of arguments + // r1: function + __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); + __ ldr(r2, + FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset)); + __ SmiUntag(r2); + __ cmp(r2, r0); // Check formal and actual parameter counts. 
+ __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), + RelocInfo::CODE_TARGET, + ne); + + __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset)); + ParameterCount expected(0); + __ InvokeCode(r3, expected, expected, JUMP_FUNCTION, NullCallWrapper()); } @@ -1361,8 +1499,9 @@ static void Generate_ApplyHelper(MacroAssembler* masm, bool targetIsArgument) { const int kFunctionOffset = kReceiverOffset + kPointerSize; __ ldr(r0, MemOperand(fp, kFunctionOffset)); // get the function - __ ldr(r1, MemOperand(fp, kArgumentsOffset)); // get the args array - __ Push(r0, r1); + __ push(r0); + __ ldr(r0, MemOperand(fp, kArgumentsOffset)); // get the args array + __ push(r0); if (targetIsArgument) { __ InvokeBuiltin(Context::REFLECT_APPLY_PREPARE_BUILTIN_INDEX, CALL_FUNCTION); @@ -1377,18 +1516,91 @@ static void Generate_ApplyHelper(MacroAssembler* masm, bool targetIsArgument) { StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize); const int kLimitOffset = StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize); - __ mov(r1, Operand::Zero()); - __ ldr(r2, MemOperand(fp, kReceiverOffset)); - __ Push(r0, r1, r2); // limit, initial index and receiver. + __ push(r0); // limit + __ mov(r1, Operand::Zero()); // initial index + __ push(r1); + + // Get the receiver. + __ ldr(r0, MemOperand(fp, kReceiverOffset)); + + // Check that the function is a JS function (otherwise it must be a proxy). + Label push_receiver; + __ ldr(r1, MemOperand(fp, kFunctionOffset)); + __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE); + __ b(ne, &push_receiver); + + // Change context eagerly to get the right global object if necessary. + __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset)); + // Load the shared function info while the function is still in r1. + __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); + + // Compute the receiver. + // Do not transform the receiver for strict mode functions. 
+ Label call_to_object, use_global_proxy; + __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCompilerHintsOffset)); + __ tst(r2, Operand(1 << (SharedFunctionInfo::kStrictModeFunction + + kSmiTagSize))); + __ b(ne, &push_receiver); + + // Do not transform the receiver for strict mode functions. + __ tst(r2, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize))); + __ b(ne, &push_receiver); + + // Compute the receiver in sloppy mode. + __ JumpIfSmi(r0, &call_to_object); + __ LoadRoot(r1, Heap::kNullValueRootIndex); + __ cmp(r0, r1); + __ b(eq, &use_global_proxy); + __ LoadRoot(r1, Heap::kUndefinedValueRootIndex); + __ cmp(r0, r1); + __ b(eq, &use_global_proxy); + + // Check if the receiver is already a JavaScript object. + // r0: receiver + STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); + __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE); + __ b(ge, &push_receiver); + + // Convert the receiver to a regular object. + // r0: receiver + __ bind(&call_to_object); + ToObjectStub stub(masm->isolate()); + __ CallStub(&stub); + __ b(&push_receiver); + + __ bind(&use_global_proxy); + __ ldr(r0, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); + __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalProxyOffset)); + + // Push the receiver. + // r0: receiver + __ bind(&push_receiver); + __ push(r0); // Copy all arguments from the array to the stack. - Generate_PushAppliedArguments(masm, kArgumentsOffset, kIndexOffset, - kLimitOffset); + Generate_PushAppliedArguments( + masm, kArgumentsOffset, kIndexOffset, kLimitOffset); - // Call the callable. - // TODO(bmeurer): This should be a tail call according to ES6. + // Call the function. 
+ Label call_proxy; + ParameterCount actual(r0); __ ldr(r1, MemOperand(fp, kFunctionOffset)); - __ Call(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); + __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE); + __ b(ne, &call_proxy); + __ InvokeFunction(r1, actual, CALL_FUNCTION, NullCallWrapper()); + + frame_scope.GenerateLeaveFrame(); + __ add(sp, sp, Operand(kStackSize * kPointerSize)); + __ Jump(lr); + + // Call the function proxy. + __ bind(&call_proxy); + __ push(r1); // add function proxy as last argument + __ add(r0, r0, Operand(1)); + __ mov(r2, Operand::Zero()); + __ GetBuiltinFunction(r1, Context::CALL_FUNCTION_PROXY_BUILTIN_INDEX); + __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), + RelocInfo::CODE_TARGET); // Tear down the internal frame and remove function, receiver and args. } @@ -1521,141 +1733,6 @@ static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) { } -// static -void Builtins::Generate_CallFunction(MacroAssembler* masm) { - // ----------- S t a t e ------------- - // -- r0 : the number of arguments (not including the receiver) - // -- r1 : the function to call (checked to be a JSFunction) - // ----------------------------------- - - Label convert, convert_global_proxy, convert_to_object, done_convert; - __ AssertFunction(r1); - // TODO(bmeurer): Throw a TypeError if function's [[FunctionKind]] internal - // slot is "classConstructor". - // Enter the context of the function; ToObject has to run in the function - // context, and we also need to take the global proxy from the function - // context in case of conversion. - // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList) - STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset == - SharedFunctionInfo::kStrictModeByteOffset); - __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset)); - __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); - // We need to convert the receiver for non-native sloppy mode functions. 
- __ ldrb(r3, FieldMemOperand(r2, SharedFunctionInfo::kNativeByteOffset)); - __ tst(r3, Operand((1 << SharedFunctionInfo::kNativeBitWithinByte) | - (1 << SharedFunctionInfo::kStrictModeBitWithinByte))); - __ b(ne, &done_convert); - { - __ ldr(r3, MemOperand(sp, r0, LSL, kPointerSizeLog2)); - - // ----------- S t a t e ------------- - // -- r0 : the number of arguments (not including the receiver) - // -- r1 : the function to call (checked to be a JSFunction) - // -- r2 : the shared function info. - // -- r3 : the receiver - // -- cp : the function context. - // ----------------------------------- - - Label convert_receiver; - __ JumpIfSmi(r3, &convert_to_object); - STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE); - __ CompareObjectType(r3, r4, r4, FIRST_JS_RECEIVER_TYPE); - __ b(hs, &done_convert); - __ JumpIfRoot(r3, Heap::kUndefinedValueRootIndex, &convert_global_proxy); - __ JumpIfNotRoot(r3, Heap::kNullValueRootIndex, &convert_to_object); - __ bind(&convert_global_proxy); - { - // Patch receiver to global proxy. - __ LoadGlobalProxy(r3); - } - __ b(&convert_receiver); - __ bind(&convert_to_object); - { - // Convert receiver using ToObject. - // TODO(bmeurer): Inline the allocation here to avoid building the frame - // in the fast case? (fall back to AllocateInNewSpace?) - FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); - __ SmiTag(r0); - __ Push(r0, r1); - __ mov(r0, r3); - ToObjectStub stub(masm->isolate()); - __ CallStub(&stub); - __ mov(r3, r0); - __ Pop(r0, r1); - __ SmiUntag(r0); - } - __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); - __ bind(&convert_receiver); - __ str(r3, MemOperand(sp, r0, LSL, kPointerSizeLog2)); - } - __ bind(&done_convert); - - // ----------- S t a t e ------------- - // -- r0 : the number of arguments (not including the receiver) - // -- r1 : the function to call (checked to be a JSFunction) - // -- r2 : the shared function info. - // -- cp : the function context. 
- // ----------------------------------- - - __ ldr(r2, - FieldMemOperand(r2, SharedFunctionInfo::kFormalParameterCountOffset)); - __ SmiUntag(r2); - __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset)); - ParameterCount actual(r0); - ParameterCount expected(r2); - __ InvokeCode(r3, expected, actual, JUMP_FUNCTION, NullCallWrapper()); -} - - -// static -void Builtins::Generate_Call(MacroAssembler* masm) { - // ----------- S t a t e ------------- - // -- r0 : the number of arguments (not including the receiver) - // -- r1 : the target to call (can be any Object). - // ----------------------------------- - - Label non_smi, non_function; - __ JumpIfSmi(r1, &non_function); - __ bind(&non_smi); - __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE); - __ Jump(masm->isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET, - eq); - __ cmp(r2, Operand(JS_FUNCTION_PROXY_TYPE)); - __ b(ne, &non_function); - - // 1. Call to function proxy. - // TODO(neis): This doesn't match the ES6 spec for [[Call]] on proxies. - __ ldr(r1, FieldMemOperand(r1, JSFunctionProxy::kCallTrapOffset)); - __ AssertNotSmi(r1); - __ b(&non_smi); - - // 2. Call to something else, which might have a [[Call]] internal method (if - // not we raise an exception). - __ bind(&non_function); - // TODO(bmeurer): I wonder why we prefer to have slow API calls? This could - // be awesome instead; i.e. a trivial improvement would be to call into the - // runtime and just deal with the API function there instead of returning a - // delegate from a runtime call that just jumps back to the runtime once - // called. Or, bonus points, call directly into the C API function here, as - // we do in some Crankshaft fast cases. - // Overwrite the original receiver with the (original) target. - __ str(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2)); - { - // Determine the delegate for the target (if any). 
- FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL); - __ SmiTag(r0); - __ Push(r0, r1); - __ CallRuntime(Runtime::kGetFunctionDelegate, 1); - __ mov(r1, r0); - __ Pop(r0); - __ SmiUntag(r0); - } - // The delegate is always a regular function. - __ AssertFunction(r1); - __ Jump(masm->isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET); -} - - void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { // ----------- S t a t e ------------- // -- r0 : actual number of arguments diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc index af86e47a3..f504b72fe 100644 --- a/src/arm/code-stubs-arm.cc +++ b/src/arm/code-stubs-arm.cc @@ -2395,9 +2395,31 @@ static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) { } -static void EmitSlowCase(MacroAssembler* masm, int argc) { - __ mov(r0, Operand(argc)); - __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); +static void EmitSlowCase(MacroAssembler* masm, + int argc, + Label* non_function) { + // Check for function proxy. + __ cmp(r4, Operand(JS_FUNCTION_PROXY_TYPE)); + __ b(ne, non_function); + __ push(r1); // put proxy as additional argument + __ mov(r0, Operand(argc + 1, RelocInfo::NONE32)); + __ mov(r2, Operand::Zero()); + __ GetBuiltinFunction(r1, Context::CALL_FUNCTION_PROXY_BUILTIN_INDEX); + { + Handle adaptor = + masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); + __ Jump(adaptor, RelocInfo::CODE_TARGET); + } + + // CALL_NON_FUNCTION expects the non-function callee as receiver (instead + // of the original receiver from the call site). + __ bind(non_function); + __ str(r1, MemOperand(sp, argc * kPointerSize)); + __ mov(r0, Operand(argc)); // Set up the number of arguments. 
+ __ mov(r2, Operand::Zero()); + __ GetBuiltinFunction(r1, Context::CALL_NON_FUNCTION_BUILTIN_INDEX); + __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), + RelocInfo::CODE_TARGET); } @@ -2419,12 +2441,12 @@ static void CallFunctionNoFeedback(MacroAssembler* masm, int argc, bool needs_checks, bool call_as_method) { // r1 : the function to call - Label slow, wrap, cont; + Label slow, non_function, wrap, cont; if (needs_checks) { // Check that the function is really a JavaScript function. // r1: pushed function (to be verified) - __ JumpIfSmi(r1, &slow); + __ JumpIfSmi(r1, &non_function); // Goto slow case if we do not have a function. __ CompareObjectType(r1, r4, r4, JS_FUNCTION_TYPE); @@ -2459,7 +2481,7 @@ static void CallFunctionNoFeedback(MacroAssembler* masm, if (needs_checks) { // Slow-case: Non-function called. __ bind(&slow); - EmitSlowCase(masm, argc); + EmitSlowCase(masm, argc, &non_function); } if (call_as_method) { @@ -2593,8 +2615,13 @@ void CallIC_ArrayStub::Generate(MacroAssembler* masm) { GenerateMiss(masm); // The slow case, we need this no matter what to complete a call after a miss. - __ mov(r0, Operand(arg_count())); - __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); + CallFunctionNoFeedback(masm, + arg_count(), + true, + CallAsMethod()); + + // Unreachable. 
+ __ stop("Unexpected code address"); } @@ -2607,7 +2634,7 @@ void CallICStub::Generate(MacroAssembler* masm) { const int generic_offset = FixedArray::OffsetOfElementAt(TypeFeedbackVector::kGenericCountIndex); Label extra_checks_or_miss, slow_start; - Label slow, wrap, cont; + Label slow, non_function, wrap, cont; Label have_js_function; int argc = arg_count(); ParameterCount actual(argc); @@ -2661,7 +2688,7 @@ void CallICStub::Generate(MacroAssembler* masm) { __ InvokeFunction(r1, actual, JUMP_FUNCTION, NullCallWrapper()); __ bind(&slow); - EmitSlowCase(masm, argc); + EmitSlowCase(masm, argc, &non_function); if (CallAsMethod()) { __ bind(&wrap); @@ -2748,7 +2775,7 @@ void CallICStub::Generate(MacroAssembler* masm) { __ bind(&slow_start); // Check that the function is really a JavaScript function. // r1: pushed function (to be verified) - __ JumpIfSmi(r1, &slow); + __ JumpIfSmi(r1, &non_function); // Goto slow case if we do not have a function. __ CompareObjectType(r1, r4, r4, JS_FUNCTION_TYPE); diff --git a/src/arm/macro-assembler-arm.cc b/src/arm/macro-assembler-arm.cc index 220acec04..057a59cf5 100644 --- a/src/arm/macro-assembler-arm.cc +++ b/src/arm/macro-assembler-arm.cc @@ -2641,12 +2641,6 @@ void MacroAssembler::LoadContext(Register dst, int context_chain_length) { } -void MacroAssembler::LoadGlobalProxy(Register dst) { - ldr(dst, GlobalObjectOperand()); - ldr(dst, FieldMemOperand(dst, GlobalObject::kGlobalProxyOffset)); -} - - void MacroAssembler::LoadTransitionedArrayMapConditional( ElementsKind expected_kind, ElementsKind transitioned_kind, @@ -2808,19 +2802,6 @@ void MacroAssembler::AssertName(Register object) { } -void MacroAssembler::AssertFunction(Register object) { - if (emit_debug_code()) { - STATIC_ASSERT(kSmiTag == 0); - tst(object, Operand(kSmiTagMask)); - Check(ne, kOperandIsASmiAndNotAFunction); - push(object); - CompareObjectType(object, object, object, JS_FUNCTION_TYPE); - pop(object); - Check(ne, kOperandIsNotAFunction); - } -} - - void 
MacroAssembler::AssertUndefinedOrAllocationSite(Register object, Register scratch) { if (emit_debug_code()) { diff --git a/src/arm/macro-assembler-arm.h b/src/arm/macro-assembler-arm.h index d0f770499..d41878574 100644 --- a/src/arm/macro-assembler-arm.h +++ b/src/arm/macro-assembler-arm.h @@ -608,9 +608,6 @@ class MacroAssembler: public Assembler { void LoadContext(Register dst, int context_chain_length); - // Load the global proxy from the current context. - void LoadGlobalProxy(Register dst); - // Conditionally load the cached Array transitioned map of type // transitioned_kind from the native context if the map in register // map_in_out is the cached Array map in the native context of @@ -950,23 +947,7 @@ class MacroAssembler: public Assembler { // Compare the object in a register to a value from the root list. // Uses the ip register as scratch. void CompareRoot(Register obj, Heap::RootListIndex index); - void PushRoot(Heap::RootListIndex index) { - LoadRoot(ip, index); - Push(ip); - } - - // Compare the object in a register to a value and jump if they are equal. - void JumpIfRoot(Register with, Heap::RootListIndex index, Label* if_equal) { - CompareRoot(with, index); - b(eq, if_equal); - } - // Compare the object in a register to a value and jump if they are not equal. - void JumpIfNotRoot(Register with, Heap::RootListIndex index, - Label* if_not_equal) { - CompareRoot(with, index); - b(ne, if_not_equal); - } // Load and check the instance type of an object for being a string. // Loads the type into the second argument register. @@ -1307,9 +1288,6 @@ class MacroAssembler: public Assembler { // Abort execution if argument is not a name, enabled via --debug-code. void AssertName(Register object); - // Abort execution if argument is not a JSFunction, enabled via --debug-code. - void AssertFunction(Register object); - // Abort execution if argument is not undefined or an AllocationSite, enabled // via --debug-code. 
void AssertUndefinedOrAllocationSite(Register object, Register scratch); diff --git a/src/arm64/builtins-arm64.cc b/src/arm64/builtins-arm64.cc index 8297fc3e2..2dcfb1bd4 100644 --- a/src/arm64/builtins-arm64.cc +++ b/src/arm64/builtins-arm64.cc @@ -1290,15 +1290,21 @@ void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) { void Builtins::Generate_FunctionCall(MacroAssembler* masm) { + enum { + call_type_JS_func = 0, + call_type_func_proxy = 1, + call_type_non_func = 2 + }; Register argc = x0; Register function = x1; + Register call_type = x4; Register scratch1 = x10; Register scratch2 = x11; + Register receiver_type = x13; ASM_LOCATION("Builtins::Generate_FunctionCall"); // 1. Make sure we have at least one argument. - { - Label done; + { Label done; __ Cbnz(argc, &done); __ LoadRoot(scratch1, Heap::kUndefinedValueRootIndex); __ Push(scratch1); @@ -1306,14 +1312,107 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) { __ Bind(&done); } - // 2. Get the callable to call (passed as receiver) from the stack. + // 2. Get the function to call (passed as receiver) from the stack, check + // if it is a function. + Label slow, non_function; __ Peek(function, Operand(argc, LSL, kXRegSizeLog2)); + __ JumpIfSmi(function, &non_function); + __ JumpIfNotObjectType(function, scratch1, receiver_type, + JS_FUNCTION_TYPE, &slow); + + // 3a. Patch the first argument if necessary when calling a function. + Label shift_arguments; + __ Mov(call_type, static_cast(call_type_JS_func)); + { Label convert_to_object, use_global_proxy, patch_receiver; + // Change context eagerly in case we need the global receiver. + __ Ldr(cp, FieldMemOperand(function, JSFunction::kContextOffset)); + + // Do not transform the receiver for strict mode functions. + // Also do not transform the receiver for native (Compilerhints already in + // x3). 
+ __ Ldr(scratch1, + FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset)); + __ Ldr(scratch2.W(), + FieldMemOperand(scratch1, SharedFunctionInfo::kCompilerHintsOffset)); + __ TestAndBranchIfAnySet( + scratch2.W(), + (1 << SharedFunctionInfo::kStrictModeFunction) | + (1 << SharedFunctionInfo::kNative), + &shift_arguments); + + // Compute the receiver in sloppy mode. + Register receiver = x2; + __ Sub(scratch1, argc, 1); + __ Peek(receiver, Operand(scratch1, LSL, kXRegSizeLog2)); + __ JumpIfSmi(receiver, &convert_to_object); + + __ JumpIfRoot(receiver, Heap::kUndefinedValueRootIndex, + &use_global_proxy); + __ JumpIfRoot(receiver, Heap::kNullValueRootIndex, &use_global_proxy); + + STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); + __ JumpIfObjectType(receiver, scratch1, scratch2, + FIRST_SPEC_OBJECT_TYPE, &shift_arguments, ge); + + __ Bind(&convert_to_object); + + { + // Enter an internal frame in order to preserve argument count. + FrameScope scope(masm, StackFrame::INTERNAL); + __ SmiTag(argc); + + __ Push(argc); + __ Mov(x0, receiver); + ToObjectStub stub(masm->isolate()); + __ CallStub(&stub); + __ Mov(receiver, x0); + + __ Pop(argc); + __ SmiUntag(argc); + + // Exit the internal frame. + } + + // Restore the function and flag in the registers. + __ Peek(function, Operand(argc, LSL, kXRegSizeLog2)); + __ Mov(call_type, static_cast(call_type_JS_func)); + __ B(&patch_receiver); + + __ Bind(&use_global_proxy); + __ Ldr(receiver, GlobalObjectMemOperand()); + __ Ldr(receiver, + FieldMemOperand(receiver, GlobalObject::kGlobalProxyOffset)); - // 3. Shift arguments and return address one slot down on the stack + + __ Bind(&patch_receiver); + __ Sub(scratch1, argc, 1); + __ Poke(receiver, Operand(scratch1, LSL, kXRegSizeLog2)); + + __ B(&shift_arguments); + } + + // 3b. Check for function proxy. 
+ __ Bind(&slow); + __ Mov(call_type, static_cast(call_type_func_proxy)); + __ Cmp(receiver_type, JS_FUNCTION_PROXY_TYPE); + __ B(eq, &shift_arguments); + __ Bind(&non_function); + __ Mov(call_type, static_cast(call_type_non_func)); + + // 3c. Patch the first argument when calling a non-function. The + // CALL_NON_FUNCTION builtin expects the non-function callee as + // receiver, so overwrite the first argument which will ultimately + // become the receiver. + // call type (0: JS function, 1: function proxy, 2: non-function) + __ Sub(scratch1, argc, 1); + __ Poke(function, Operand(scratch1, LSL, kXRegSizeLog2)); + + // 4. Shift arguments and return address one slot down on the stack // (overwriting the original receiver). Adjust argument count to make // the original first argument the new receiver. - { - Label loop; + // call type (0: JS function, 1: function proxy, 2: non-function) + __ Bind(&shift_arguments); + { Label loop; // Calculate the copy start address (destination). Copy end address is jssp. __ Add(scratch2, jssp, Operand(argc, LSL, kPointerSizeLog2)); __ Sub(scratch1, scratch2, kPointerSize); @@ -1329,8 +1428,46 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) { __ Drop(1); } - // 4. Call the callable. - __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); + // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin, + // or a function proxy via CALL_FUNCTION_PROXY. + // call type (0: JS function, 1: function proxy, 2: non-function) + { Label js_function, non_proxy; + __ Cbz(call_type, &js_function); + // Expected number of arguments is 0 for CALL_NON_FUNCTION. + __ Mov(x2, 0); + __ Cmp(call_type, static_cast(call_type_func_proxy)); + __ B(ne, &non_proxy); + + __ Push(function); // Re-add proxy object as additional argument. 
+ __ Add(argc, argc, 1); + __ GetBuiltinFunction(function, Context::CALL_FUNCTION_PROXY_BUILTIN_INDEX); + __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), + RelocInfo::CODE_TARGET); + + __ Bind(&non_proxy); + __ GetBuiltinFunction(function, Context::CALL_NON_FUNCTION_BUILTIN_INDEX); + __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), + RelocInfo::CODE_TARGET); + __ Bind(&js_function); + } + + // 5b. Get the code to call from the function and check that the number of + // expected arguments matches what we're providing. If so, jump + // (tail-call) to the code in register edx without checking arguments. + __ Ldr(x3, FieldMemOperand(function, JSFunction::kSharedFunctionInfoOffset)); + __ Ldrsw(x2, + FieldMemOperand(x3, + SharedFunctionInfo::kFormalParameterCountOffset)); + Label dont_adapt_args; + __ Cmp(x2, argc); // Check formal and actual parameter counts. + __ B(eq, &dont_adapt_args); + __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), + RelocInfo::CODE_TARGET); + __ Bind(&dont_adapt_args); + + __ Ldr(x3, FieldMemOperand(function, JSFunction::kCodeEntryOffset)); + ParameterCount expected(0); + __ InvokeCode(x3, expected, expected, JUMP_FUNCTION, NullCallWrapper()); } @@ -1416,21 +1553,88 @@ static void Generate_ApplyHelper(MacroAssembler* masm, bool targetIsArgument) { Generate_CheckStackOverflow(masm, kFunctionOffset, argc, kArgcIsSmiTagged); - // Push current limit, index and receiver. + // Push current limit and index. __ Mov(x1, 0); // Initial index. + __ Push(argc, x1); + + Label push_receiver; __ Ldr(receiver, MemOperand(fp, kReceiverOffset)); - __ Push(argc, x1, receiver); + + // Check that the function is a JS function. Otherwise it must be a proxy. + // When it is not the function proxy will be invoked later. + __ JumpIfNotObjectType(function, x10, x11, JS_FUNCTION_TYPE, + &push_receiver); + + // Change context eagerly to get the right global object if necessary. 
+ __ Ldr(cp, FieldMemOperand(function, JSFunction::kContextOffset)); + // Load the shared function info. + __ Ldr(x2, FieldMemOperand(function, + JSFunction::kSharedFunctionInfoOffset)); + + // Compute and push the receiver. + // Do not transform the receiver for strict mode functions. + Label convert_receiver_to_object, use_global_proxy; + __ Ldr(w10, FieldMemOperand(x2, SharedFunctionInfo::kCompilerHintsOffset)); + __ Tbnz(x10, SharedFunctionInfo::kStrictModeFunction, &push_receiver); + // Do not transform the receiver for native functions. + __ Tbnz(x10, SharedFunctionInfo::kNative, &push_receiver); + + // Compute the receiver in sloppy mode. + __ JumpIfSmi(receiver, &convert_receiver_to_object); + __ JumpIfRoot(receiver, Heap::kNullValueRootIndex, &use_global_proxy); + __ JumpIfRoot(receiver, Heap::kUndefinedValueRootIndex, + &use_global_proxy); + + // Check if the receiver is already a JavaScript object. + STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); + __ JumpIfObjectType(receiver, x10, x11, FIRST_SPEC_OBJECT_TYPE, + &push_receiver, ge); + + // Call a builtin to convert the receiver to a regular object. + __ Bind(&convert_receiver_to_object); + __ Mov(x0, receiver); + ToObjectStub stub(masm->isolate()); + __ CallStub(&stub); + __ Mov(receiver, x0); + __ B(&push_receiver); + + __ Bind(&use_global_proxy); + __ Ldr(x10, GlobalObjectMemOperand()); + __ Ldr(receiver, FieldMemOperand(x10, GlobalObject::kGlobalProxyOffset)); + + // Push the receiver + __ Bind(&push_receiver); + __ Push(receiver); // Copy all arguments from the array to the stack. - Generate_PushAppliedArguments(masm, kArgumentsOffset, kIndexOffset, - kLimitOffset); + Generate_PushAppliedArguments( + masm, kArgumentsOffset, kIndexOffset, kLimitOffset); - // At the end of the loop, the number of arguments is stored in x0, untagged + // At the end of the loop, the number of arguments is stored in 'current', + // represented as a smi. - // Call the callable. 
- // TODO(bmeurer): This should be a tail call according to ES6. - __ Ldr(x1, MemOperand(fp, kFunctionOffset)); - __ Call(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); + function = x1; // From now on we want the function to be kept in x1; + __ Ldr(function, MemOperand(fp, kFunctionOffset)); + + // Call the function. + Label call_proxy; + ParameterCount actual(x0); + __ JumpIfNotObjectType(function, x10, x11, JS_FUNCTION_TYPE, &call_proxy); + __ InvokeFunction(function, actual, CALL_FUNCTION, NullCallWrapper()); + frame_scope.GenerateLeaveFrame(); + __ Drop(kStackSize); + __ Ret(); + + // Call the function proxy. + __ Bind(&call_proxy); + // x0 : argc + // x1 : function + __ Push(function); // Add function proxy as last argument. + __ Add(x0, x0, 1); + __ Mov(x2, 0); + __ GetBuiltinFunction(x1, Context::CALL_FUNCTION_PROXY_BUILTIN_INDEX); + __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), + RelocInfo::CODE_TARGET); } __ Drop(kStackSize); __ Ret(); @@ -1566,140 +1770,6 @@ static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) { } -// static -void Builtins::Generate_CallFunction(MacroAssembler* masm) { - // ----------- S t a t e ------------- - // -- x0 : the number of arguments (not including the receiver) - // -- x1 : the function to call (checked to be a JSFunction) - // ----------------------------------- - - Label convert, convert_global_proxy, convert_to_object, done_convert; - __ AssertFunction(x1); - // TODO(bmeurer): Throw a TypeError if function's [[FunctionKind]] internal - // slot is "classConstructor". - // Enter the context of the function; ToObject has to run in the function - // context, and we also need to take the global proxy from the function - // context in case of conversion. 
- // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList) - __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset)); - __ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset)); - // We need to convert the receiver for non-native sloppy mode functions. - __ Ldr(w3, FieldMemOperand(x2, SharedFunctionInfo::kCompilerHintsOffset)); - __ TestAndBranchIfAnySet(w3, - (1 << SharedFunctionInfo::kNative) | - (1 << SharedFunctionInfo::kStrictModeFunction), - &done_convert); - { - __ Peek(x3, Operand(x0, LSL, kXRegSizeLog2)); - - // ----------- S t a t e ------------- - // -- x0 : the number of arguments (not including the receiver) - // -- x1 : the function to call (checked to be a JSFunction) - // -- x2 : the shared function info. - // -- x3 : the receiver - // -- cp : the function context. - // ----------------------------------- - - Label convert_receiver; - __ JumpIfSmi(x3, &convert_to_object); - STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE); - __ CompareObjectType(x3, x4, x4, FIRST_JS_RECEIVER_TYPE); - __ B(hs, &done_convert); - __ JumpIfRoot(x3, Heap::kUndefinedValueRootIndex, &convert_global_proxy); - __ JumpIfNotRoot(x3, Heap::kNullValueRootIndex, &convert_to_object); - __ Bind(&convert_global_proxy); - { - // Patch receiver to global proxy. - __ LoadGlobalProxy(x3); - } - __ B(&convert_receiver); - __ Bind(&convert_to_object); - { - // Convert receiver using ToObject. - // TODO(bmeurer): Inline the allocation here to avoid building the frame - // in the fast case? (fall back to AllocateInNewSpace?) 
- FrameScope scope(masm, StackFrame::INTERNAL); - __ SmiTag(x0); - __ Push(x0, x1); - __ Mov(x0, x3); - ToObjectStub stub(masm->isolate()); - __ CallStub(&stub); - __ Mov(x3, x0); - __ Pop(x1, x0); - __ SmiUntag(x0); - } - __ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset)); - __ Bind(&convert_receiver); - __ Poke(x3, Operand(x0, LSL, kXRegSizeLog2)); - } - __ Bind(&done_convert); - - // ----------- S t a t e ------------- - // -- x0 : the number of arguments (not including the receiver) - // -- x1 : the function to call (checked to be a JSFunction) - // -- x2 : the shared function info. - // -- cp : the function context. - // ----------------------------------- - - __ Ldrsw( - x2, FieldMemOperand(x2, SharedFunctionInfo::kFormalParameterCountOffset)); - __ Ldr(x3, FieldMemOperand(x1, JSFunction::kCodeEntryOffset)); - ParameterCount actual(x0); - ParameterCount expected(x2); - __ InvokeCode(x3, expected, actual, JUMP_FUNCTION, NullCallWrapper()); -} - - -// static -void Builtins::Generate_Call(MacroAssembler* masm) { - // ----------- S t a t e ------------- - // -- x0 : the number of arguments (not including the receiver) - // -- x1 : the target to call (can be any Object). - // ----------------------------------- - - Label non_smi, non_jsfunction, non_function; - __ JumpIfSmi(x1, &non_function); - __ Bind(&non_smi); - __ CompareObjectType(x1, x2, x2, JS_FUNCTION_TYPE); - __ B(ne, &non_jsfunction); - __ Jump(masm->isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET); - __ Bind(&non_jsfunction); - __ Cmp(x2, JS_FUNCTION_PROXY_TYPE); - __ B(ne, &non_function); - - // 1. Call to function proxy. - // TODO(neis): This doesn't match the ES6 spec for [[Call]] on proxies. - __ Ldr(x1, FieldMemOperand(x1, JSFunctionProxy::kCallTrapOffset)); - __ AssertNotSmi(x1); - __ B(&non_smi); - - // 2. Call to something else, which might have a [[Call]] internal method (if - // not we raise an exception). 
- __ Bind(&non_function); - // TODO(bmeurer): I wonder why we prefer to have slow API calls? This could - // be awesome instead; i.e. a trivial improvement would be to call into the - // runtime and just deal with the API function there instead of returning a - // delegate from a runtime call that just jumps back to the runtime once - // called. Or, bonus points, call directly into the C API function here, as - // we do in some Crankshaft fast cases. - // Overwrite the original receiver with the (original) target. - __ Poke(x1, Operand(x0, LSL, kXRegSizeLog2)); - { - // Determine the delegate for the target (if any). - FrameScope scope(masm, StackFrame::INTERNAL); - __ SmiTag(x0); - __ Push(x0, x1); - __ CallRuntime(Runtime::kGetFunctionDelegate, 1); - __ Mov(x1, x0); - __ Pop(x0); - __ SmiUntag(x0); - } - // The delegate is always a regular function. - __ AssertFunction(x1); - __ Jump(masm->isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET); -} - - void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { ASM_LOCATION("Builtins::Generate_ArgumentsAdaptorTrampoline"); // ----------- S t a t e ------------- diff --git a/src/arm64/code-stubs-arm64.cc b/src/arm64/code-stubs-arm64.cc index 367051d70..1720bdfad 100644 --- a/src/arm64/code-stubs-arm64.cc +++ b/src/arm64/code-stubs-arm64.cc @@ -2760,9 +2760,33 @@ static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) { } -static void EmitSlowCase(MacroAssembler* masm, int argc) { - __ Mov(x0, argc); - __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); +static void EmitSlowCase(MacroAssembler* masm, + int argc, + Register function, + Register type, + Label* non_function) { + // Check for function proxy. + // x10 : function type. 
+ __ CompareAndBranch(type, JS_FUNCTION_PROXY_TYPE, ne, non_function); + __ Push(function); // put proxy as additional argument + __ Mov(x0, argc + 1); + __ Mov(x2, 0); + __ GetBuiltinFunction(x1, Context::CALL_FUNCTION_PROXY_BUILTIN_INDEX); + { + Handle adaptor = + masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); + __ Jump(adaptor, RelocInfo::CODE_TARGET); + } + + // CALL_NON_FUNCTION expects the non-function callee as receiver (instead + // of the original receiver from the call site). + __ Bind(non_function); + __ Poke(function, argc * kXRegSize); + __ Mov(x0, argc); // Set up the number of arguments. + __ Mov(x2, 0); + __ GetBuiltinFunction(function, Context::CALL_NON_FUNCTION_BUILTIN_INDEX); + __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), + RelocInfo::CODE_TARGET); } @@ -2786,14 +2810,14 @@ static void CallFunctionNoFeedback(MacroAssembler* masm, // x1 function the function to call Register function = x1; Register type = x4; - Label slow, wrap, cont; + Label slow, non_function, wrap, cont; // TODO(jbramley): This function has a lot of unnamed registers. Name them, // and tidy things up a bit. if (needs_checks) { // Check that the function is really a JavaScript function. - __ JumpIfSmi(function, &slow); + __ JumpIfSmi(function, &non_function); // Goto slow case if we do not have a function. __ JumpIfNotObjectType(function, x10, type, JS_FUNCTION_TYPE, &slow); @@ -2828,7 +2852,7 @@ static void CallFunctionNoFeedback(MacroAssembler* masm, if (needs_checks) { // Slow-case: Non-function called. __ Bind(&slow); - EmitSlowCase(masm, argc); + EmitSlowCase(masm, argc, function, type, &non_function); } if (call_as_method) { @@ -2978,8 +3002,12 @@ void CallIC_ArrayStub::Generate(MacroAssembler* masm) { GenerateMiss(masm); // The slow case, we need this no matter what to complete a call after a miss. 
- __ Mov(x0, arg_count()); - __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); + CallFunctionNoFeedback(masm, + arg_count(), + true, + CallAsMethod()); + + __ Unreachable(); } @@ -2994,7 +3022,7 @@ void CallICStub::Generate(MacroAssembler* masm) { const int generic_offset = FixedArray::OffsetOfElementAt(TypeFeedbackVector::kGenericCountIndex); Label extra_checks_or_miss, slow_start; - Label slow, wrap, cont; + Label slow, non_function, wrap, cont; Label have_js_function; int argc = arg_count(); ParameterCount actual(argc); @@ -3059,7 +3087,7 @@ void CallICStub::Generate(MacroAssembler* masm) { NullCallWrapper()); __ bind(&slow); - EmitSlowCase(masm, argc); + EmitSlowCase(masm, argc, function, type, &non_function); if (CallAsMethod()) { __ bind(&wrap); @@ -3144,7 +3172,7 @@ void CallICStub::Generate(MacroAssembler* masm) { __ bind(&slow_start); // Check that the function is really a JavaScript function. - __ JumpIfSmi(function, &slow); + __ JumpIfSmi(function, &non_function); // Goto slow case if we do not have a function. 
__ JumpIfNotObjectType(function, x10, type, JS_FUNCTION_TYPE, &slow); diff --git a/src/arm64/macro-assembler-arm64.cc b/src/arm64/macro-assembler-arm64.cc index 75814e83a..16cdf4a53 100644 --- a/src/arm64/macro-assembler-arm64.cc +++ b/src/arm64/macro-assembler-arm64.cc @@ -1613,19 +1613,6 @@ void MacroAssembler::AssertName(Register object) { } -void MacroAssembler::AssertFunction(Register object) { - if (emit_debug_code()) { - AssertNotSmi(object, kOperandIsASmiAndNotAFunction); - - UseScratchRegisterScope temps(this); - Register temp = temps.AcquireX(); - - CompareObjectType(object, temp, temp, JS_FUNCTION_TYPE); - Check(eq, kOperandIsNotAFunction); - } -} - - void MacroAssembler::AssertUndefinedOrAllocationSite(Register object, Register scratch) { if (emit_debug_code()) { @@ -3058,12 +3045,6 @@ void MacroAssembler::LoadContext(Register dst, int context_chain_length) { } -void MacroAssembler::LoadGlobalProxy(Register dst) { - Ldr(dst, GlobalObjectMemOperand()); - Ldr(dst, FieldMemOperand(dst, GlobalObject::kGlobalProxyOffset)); -} - - void MacroAssembler::DebugBreak() { Mov(x0, 0); Mov(x1, ExternalReference(Runtime::kHandleDebuggerStatement, isolate())); diff --git a/src/arm64/macro-assembler-arm64.h b/src/arm64/macro-assembler-arm64.h index 29d5c5c5f..a050a74fc 100644 --- a/src/arm64/macro-assembler-arm64.h +++ b/src/arm64/macro-assembler-arm64.h @@ -959,9 +959,6 @@ class MacroAssembler : public Assembler { // Abort execution if argument is not a name, enabled via --debug-code. void AssertName(Register object); - // Abort execution if argument is not a JSFunction, enabled via --debug-code. - void AssertFunction(Register object); - // Abort execution if argument is not undefined or an AllocationSite, enabled // via --debug-code. 
void AssertUndefinedOrAllocationSite(Register object, Register scratch); @@ -1693,9 +1690,6 @@ class MacroAssembler : public Assembler { void LoadContext(Register dst, int context_chain_length); - // Load the global proxy from the current context. - void LoadGlobalProxy(Register dst); - // Emit code for a truncating division by a constant. The dividend register is // unchanged. Dividend and result must be different. void TruncatingDiv(Register result, Register dividend, int32_t divisor); diff --git a/src/bailout-reason.h b/src/bailout-reason.h index b63c5fbfb..753989649 100644 --- a/src/bailout-reason.h +++ b/src/bailout-reason.h @@ -154,12 +154,10 @@ namespace internal { V(kObjectFoundInSmiOnlyArray, "Object found in smi-only array") \ V(kObjectLiteralWithComplexProperty, "Object literal with complex property") \ V(kOffsetOutOfRange, "Offset out of range") \ - V(kOperandIsASmiAndNotAFunction, "Operand is a smi and not a function") \ V(kOperandIsASmiAndNotAName, "Operand is a smi and not a name") \ V(kOperandIsASmiAndNotAString, "Operand is a smi and not a string") \ V(kOperandIsASmi, "Operand is a smi") \ V(kOperandIsNotADate, "Operand is not a date") \ - V(kOperandIsNotAFunction, "Operand is not a function") \ V(kOperandIsNotAName, "Operand is not a name") \ V(kOperandIsNotANumber, "Operand is not a number") \ V(kOperandIsNotASmi, "Operand is not a smi") \ diff --git a/src/builtins.h b/src/builtins.h index 58b69d296..694273afd 100644 --- a/src/builtins.h +++ b/src/builtins.h @@ -70,10 +70,6 @@ enum BuiltinExtraArguments { // Define list of builtins implemented in assembly. 
#define BUILTIN_LIST_A(V) \ V(ArgumentsAdaptorTrampoline, BUILTIN, UNINITIALIZED, kNoExtraICState) \ - \ - V(CallFunction, BUILTIN, UNINITIALIZED, kNoExtraICState) \ - V(Call, BUILTIN, UNINITIALIZED, kNoExtraICState) \ - \ V(InOptimizationQueue, BUILTIN, UNINITIALIZED, kNoExtraICState) \ V(JSConstructStubGeneric, BUILTIN, UNINITIALIZED, kNoExtraICState) \ V(JSConstructStubForDerived, BUILTIN, UNINITIALIZED, kNoExtraICState) \ @@ -265,11 +261,6 @@ class Builtins { static void Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm); static void Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm); - // ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList) - static void Generate_CallFunction(MacroAssembler* masm); - // ES6 section 7.3.12 Call(F, V, [argumentsList]) - static void Generate_Call(MacroAssembler* masm); - static void Generate_FunctionCall(MacroAssembler* masm); static void Generate_FunctionApply(MacroAssembler* masm); static void Generate_ReflectApply(MacroAssembler* masm); diff --git a/src/code-stubs.h b/src/code-stubs.h index 94edfa4aa..bd7a35933 100644 --- a/src/code-stubs.h +++ b/src/code-stubs.h @@ -1953,8 +1953,6 @@ class RegExpConstructResultStub final : public HydrogenCodeStub { }; -// TODO(bmeurer): Deprecate the CallFunctionStub in favor of the more general -// Invoke family of builtins. 
class CallFunctionStub: public PlatformCodeStub { public: CallFunctionStub(Isolate* isolate, int argc, CallFunctionFlags flags) diff --git a/src/contexts.h b/src/contexts.h index 4685e1636..6c23cb719 100644 --- a/src/contexts.h +++ b/src/contexts.h @@ -102,8 +102,11 @@ enum BindingFlags { V(BIT_XOR_STRONG_BUILTIN_INDEX, JSFunction, bit_xor_strong_builtin) \ V(CALL_FUNCTION_PROXY_AS_CONSTRUCTOR_BUILTIN_INDEX, JSFunction, \ call_function_proxy_as_constructor_builtin) \ + V(CALL_FUNCTION_PROXY_BUILTIN_INDEX, JSFunction, \ + call_function_proxy_builtin) \ V(CALL_NON_FUNCTION_AS_CONSTRUCTOR_BUILTIN_INDEX, JSFunction, \ call_non_function_as_constructor_builtin) \ + V(CALL_NON_FUNCTION_BUILTIN_INDEX, JSFunction, call_non_function_builtin) \ V(COMPARE_BUILTIN_INDEX, JSFunction, compare_builtin) \ V(COMPARE_STRONG_BUILTIN_INDEX, JSFunction, compare_strong_builtin) \ V(CONCAT_ITERABLE_TO_ARRAY_BUILTIN_INDEX, JSFunction, \ diff --git a/src/ia32/assembler-ia32.cc b/src/ia32/assembler-ia32.cc index 9f64a6005..2e17fcb00 100644 --- a/src/ia32/assembler-ia32.cc +++ b/src/ia32/assembler-ia32.cc @@ -1587,12 +1587,12 @@ void Assembler::j(Condition cc, byte* entry, RelocInfo::Mode rmode) { } -void Assembler::j(Condition cc, Handle code, RelocInfo::Mode rmode) { +void Assembler::j(Condition cc, Handle code) { EnsureSpace ensure_space(this); // 0000 1111 1000 tttn #32-bit disp EMIT(0x0F); EMIT(0x80 | cc); - emit(code, rmode); + emit(code, RelocInfo::CODE_TARGET); } diff --git a/src/ia32/assembler-ia32.h b/src/ia32/assembler-ia32.h index 57987bc75..3daa294aa 100644 --- a/src/ia32/assembler-ia32.h +++ b/src/ia32/assembler-ia32.h @@ -856,8 +856,7 @@ class Assembler : public AssemblerBase { Label* L, Label::Distance distance = Label::kFar); void j(Condition cc, byte* entry, RelocInfo::Mode rmode); - void j(Condition cc, Handle code, - RelocInfo::Mode rmode = RelocInfo::CODE_TARGET); + void j(Condition cc, Handle code); // Floating-point operations void fld(int i); diff --git 
a/src/ia32/builtins-ia32.cc b/src/ia32/builtins-ia32.cc index 368b649a6..477856cf3 100644 --- a/src/ia32/builtins-ia32.cc +++ b/src/ia32/builtins-ia32.cc @@ -956,50 +956,161 @@ void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) { } -// static void Builtins::Generate_FunctionCall(MacroAssembler* masm) { - // Stack Layout: - // esp[0] : Return address - // esp[8] : Argument n - // esp[16] : Argument n-1 - // ... - // esp[8 * n] : Argument 1 - // esp[8 * (n + 1)] : Receiver (callable to call) - // - // eax contains the number of arguments, n, not counting the receiver. - // + Factory* factory = masm->isolate()->factory(); + // 1. Make sure we have at least one argument. - { - Label done; + { Label done; __ test(eax, eax); - __ j(not_zero, &done, Label::kNear); - __ PopReturnAddressTo(ebx); - __ PushRoot(Heap::kUndefinedValueRootIndex); - __ PushReturnAddressFrom(ebx); + __ j(not_zero, &done); + __ pop(ebx); + __ push(Immediate(factory->undefined_value())); + __ push(ebx); __ inc(eax); __ bind(&done); } - // 2. Get the callable to call (passed as receiver) from the stack. - __ mov(edi, Operand(esp, eax, times_pointer_size, kPointerSize)); + // 2. Get the function to call (passed as receiver) from the stack, check + // if it is a function. + Label slow, non_function; + // 1 ~ return address. + __ mov(edi, Operand(esp, eax, times_4, 1 * kPointerSize)); + __ JumpIfSmi(edi, &non_function); + __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx); + __ j(not_equal, &slow); + + + // 3a. Patch the first argument if necessary when calling a function. + Label shift_arguments; + __ Move(edx, Immediate(0)); // indicate regular JS_FUNCTION + { Label convert_to_object, use_global_proxy, patch_receiver; + // Change context eagerly in case we need the global receiver. + __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset)); + + // Do not transform the receiver for strict mode functions. 
+ __ mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset)); + __ test_b(FieldOperand(ebx, SharedFunctionInfo::kStrictModeByteOffset), + 1 << SharedFunctionInfo::kStrictModeBitWithinByte); + __ j(not_equal, &shift_arguments); + + // Do not transform the receiver for natives (shared already in ebx). + __ test_b(FieldOperand(ebx, SharedFunctionInfo::kNativeByteOffset), + 1 << SharedFunctionInfo::kNativeBitWithinByte); + __ j(not_equal, &shift_arguments); + + // Compute the receiver in sloppy mode. + __ mov(ebx, Operand(esp, eax, times_4, 0)); // First argument. + + // Call ToObject on the receiver if it is not an object, or use the + // global object if it is null or undefined. + __ JumpIfSmi(ebx, &convert_to_object); + __ cmp(ebx, factory->null_value()); + __ j(equal, &use_global_proxy); + __ cmp(ebx, factory->undefined_value()); + __ j(equal, &use_global_proxy); + STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); + __ CmpObjectType(ebx, FIRST_SPEC_OBJECT_TYPE, ecx); + __ j(above_equal, &shift_arguments); + + __ bind(&convert_to_object); + + { // In order to preserve argument count. + FrameScope scope(masm, StackFrame::INTERNAL); + __ SmiTag(eax); + __ push(eax); + + __ mov(eax, ebx); + ToObjectStub stub(masm->isolate()); + __ CallStub(&stub); + __ mov(ebx, eax); + __ Move(edx, Immediate(0)); // restore + + __ pop(eax); + __ SmiUntag(eax); + } + + // Restore the function to edi. + __ mov(edi, Operand(esp, eax, times_4, 1 * kPointerSize)); + __ jmp(&patch_receiver); + + __ bind(&use_global_proxy); + __ mov(ebx, + Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); + __ mov(ebx, FieldOperand(ebx, GlobalObject::kGlobalProxyOffset)); + + __ bind(&patch_receiver); + __ mov(Operand(esp, eax, times_4, 0), ebx); + + __ jmp(&shift_arguments); + } + + // 3b. Check for function proxy. 
+ __ bind(&slow); + __ Move(edx, Immediate(1)); // indicate function proxy + __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE); + __ j(equal, &shift_arguments); + __ bind(&non_function); + __ Move(edx, Immediate(2)); // indicate non-function + + // 3c. Patch the first argument when calling a non-function. The + // CALL_NON_FUNCTION builtin expects the non-function callee as + // receiver, so overwrite the first argument which will ultimately + // become the receiver. + __ mov(Operand(esp, eax, times_4, 0), edi); - // 3. Shift arguments and return address one slot down on the stack + // 4. Shift arguments and return address one slot down on the stack // (overwriting the original receiver). Adjust argument count to make // the original first argument the new receiver. - { - Label loop; + __ bind(&shift_arguments); + { Label loop; __ mov(ecx, eax); __ bind(&loop); - __ mov(ebx, Operand(esp, ecx, times_pointer_size, 0)); - __ mov(Operand(esp, ecx, times_pointer_size, kPointerSize), ebx); + __ mov(ebx, Operand(esp, ecx, times_4, 0)); + __ mov(Operand(esp, ecx, times_4, kPointerSize), ebx); __ dec(ecx); __ j(not_sign, &loop); // While non-negative (to copy return address). - __ pop(ebx); // Discard copy of return address. + __ pop(ebx); // Discard copy of return address. __ dec(eax); // One fewer argument (first argument is new receiver). } - // 4. Call the callable. - __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); + // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin, + // or a function proxy via CALL_FUNCTION_PROXY. 
+ { Label function, non_proxy; + __ test(edx, edx); + __ j(zero, &function); + __ Move(ebx, Immediate(0)); + __ cmp(edx, Immediate(1)); + __ j(not_equal, &non_proxy); + + __ pop(edx); // return address + __ push(edi); // re-add proxy object as additional argument + __ push(edx); + __ inc(eax); + __ GetBuiltinEntry(edx, Context::CALL_FUNCTION_PROXY_BUILTIN_INDEX); + __ jmp(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), + RelocInfo::CODE_TARGET); + + __ bind(&non_proxy); + __ GetBuiltinEntry(edx, Context::CALL_NON_FUNCTION_BUILTIN_INDEX); + __ jmp(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), + RelocInfo::CODE_TARGET); + __ bind(&function); + } + + // 5b. Get the code to call from the function and check that the number of + // expected arguments matches what we're providing. If so, jump + // (tail-call) to the code in register edx without checking arguments. + __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset)); + __ mov(ebx, + FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset)); + __ mov(edx, FieldOperand(edi, JSFunction::kCodeEntryOffset)); + __ SmiUntag(ebx); + __ cmp(eax, ebx); + __ j(not_equal, + masm->isolate()->builtins()->ArgumentsAdaptorTrampoline()); + + ParameterCount expected(0); + __ InvokeCode(edx, expected, expected, JUMP_FUNCTION, NullCallWrapper()); } @@ -1074,7 +1185,7 @@ static void Generate_ApplyHelper(MacroAssembler* masm, bool targetIsArgument) { static const int kReceiverOffset = kArgumentsOffset + kPointerSize; static const int kFunctionOffset = kReceiverOffset + kPointerSize; - __ push(Operand(ebp, kFunctionOffset)); // push this + __ push(Operand(ebp, kFunctionOffset)); // push this __ push(Operand(ebp, kArgumentsOffset)); // push arguments if (targetIsArgument) { __ InvokeBuiltin(Context::REFLECT_APPLY_PREPARE_BUILTIN_INDEX, @@ -1089,18 +1200,87 @@ static void Generate_ApplyHelper(MacroAssembler* masm, bool targetIsArgument) { const int kLimitOffset = 
StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize; const int kIndexOffset = kLimitOffset - 1 * kPointerSize; - __ Push(eax); // limit - __ Push(Immediate(0)); // index - __ Push(Operand(ebp, kReceiverOffset)); // receiver + __ push(eax); // limit + __ push(Immediate(0)); // index + + // Get the receiver. + __ mov(ebx, Operand(ebp, kReceiverOffset)); + + // Check that the function is a JS function (otherwise it must be a proxy). + Label push_receiver, use_global_proxy; + __ mov(edi, Operand(ebp, kFunctionOffset)); + __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx); + __ j(not_equal, &push_receiver); + + // Change context eagerly to get the right global object if necessary. + __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset)); + + // Compute the receiver. + // Do not transform the receiver for strict mode functions. + Label call_to_object; + __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset)); + __ test_b(FieldOperand(ecx, SharedFunctionInfo::kStrictModeByteOffset), + 1 << SharedFunctionInfo::kStrictModeBitWithinByte); + __ j(not_equal, &push_receiver); + + Factory* factory = masm->isolate()->factory(); + + // Do not transform the receiver for natives (shared already in ecx). + __ test_b(FieldOperand(ecx, SharedFunctionInfo::kNativeByteOffset), + 1 << SharedFunctionInfo::kNativeBitWithinByte); + __ j(not_equal, &push_receiver); + + // Compute the receiver in sloppy mode. + // Call ToObject on the receiver if it is not an object, or use the + // global object if it is null or undefined. 
+ __ JumpIfSmi(ebx, &call_to_object); + __ cmp(ebx, factory->null_value()); + __ j(equal, &use_global_proxy); + __ cmp(ebx, factory->undefined_value()); + __ j(equal, &use_global_proxy); + STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); + __ CmpObjectType(ebx, FIRST_SPEC_OBJECT_TYPE, ecx); + __ j(above_equal, &push_receiver); + + __ bind(&call_to_object); + __ mov(eax, ebx); + ToObjectStub stub(masm->isolate()); + __ CallStub(&stub); + __ mov(ebx, eax); + __ jmp(&push_receiver); + + __ bind(&use_global_proxy); + __ mov(ebx, + Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); + __ mov(ebx, FieldOperand(ebx, GlobalObject::kGlobalProxyOffset)); + + // Push the receiver. + __ bind(&push_receiver); + __ push(ebx); // Loop over the arguments array, pushing each value to the stack - Generate_PushAppliedArguments(masm, kArgumentsOffset, kIndexOffset, - kLimitOffset); + Generate_PushAppliedArguments( + masm, kArgumentsOffset, kIndexOffset, kLimitOffset); - // Call the callable. - // TODO(bmeurer): This should be a tail call according to ES6. + // Call the function. + Label call_proxy; + ParameterCount actual(eax); __ mov(edi, Operand(ebp, kFunctionOffset)); - __ Call(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); + __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx); + __ j(not_equal, &call_proxy); + __ InvokeFunction(edi, actual, CALL_FUNCTION, NullCallWrapper()); + + frame_scope.GenerateLeaveFrame(); + __ ret(kStackSize * kPointerSize); // remove this, receiver, and arguments + + // Call the function proxy. + __ bind(&call_proxy); + __ push(edi); // add function proxy as last argument + __ inc(eax); + __ Move(ebx, Immediate(0)); + __ GetBuiltinEntry(edx, Context::CALL_FUNCTION_PROXY_BUILTIN_INDEX); + __ call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), + RelocInfo::CODE_TARGET); // Leave internal frame. 
} @@ -1437,146 +1617,6 @@ static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) { } -// static -void Builtins::Generate_CallFunction(MacroAssembler* masm) { - // ----------- S t a t e ------------- - // -- eax : the number of arguments (not including the receiver) - // -- edi : the function to call (checked to be a JSFunction) - // ----------------------------------- - - Label convert, convert_global_proxy, convert_to_object, done_convert; - __ AssertFunction(edi); - // TODO(bmeurer): Throw a TypeError if function's [[FunctionKind]] internal - // slot is "classConstructor". - // Enter the context of the function; ToObject has to run in the function - // context, and we also need to take the global proxy from the function - // context in case of conversion. - // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList) - STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset == - SharedFunctionInfo::kStrictModeByteOffset); - __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset)); - __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset)); - // We need to convert the receiver for non-native sloppy mode functions. - __ test_b(FieldOperand(edx, SharedFunctionInfo::kNativeByteOffset), - (1 << SharedFunctionInfo::kNativeBitWithinByte) | - (1 << SharedFunctionInfo::kStrictModeBitWithinByte)); - __ j(not_zero, &done_convert); - { - __ mov(ecx, Operand(esp, eax, times_pointer_size, kPointerSize)); - - // ----------- S t a t e ------------- - // -- eax : the number of arguments (not including the receiver) - // -- ecx : the receiver - // -- edx : the shared function info. - // -- edi : the function to call (checked to be a JSFunction) - // -- esi : the function context. 
- // ----------------------------------- - - Label convert_receiver; - __ JumpIfSmi(ecx, &convert_to_object, Label::kNear); - STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE); - __ CmpObjectType(ecx, FIRST_JS_RECEIVER_TYPE, ebx); - __ j(above_equal, &done_convert); - __ JumpIfRoot(ecx, Heap::kUndefinedValueRootIndex, &convert_global_proxy, - Label::kNear); - __ JumpIfNotRoot(ecx, Heap::kNullValueRootIndex, &convert_to_object, - Label::kNear); - __ bind(&convert_global_proxy); - { - // Patch receiver to global proxy. - __ LoadGlobalProxy(ecx); - } - __ jmp(&convert_receiver); - __ bind(&convert_to_object); - { - // Convert receiver using ToObject. - // TODO(bmeurer): Inline the allocation here to avoid building the frame - // in the fast case? (fall back to AllocateInNewSpace?) - FrameScope scope(masm, StackFrame::INTERNAL); - __ SmiTag(eax); - __ Push(eax); - __ Push(edi); - __ mov(eax, ecx); - ToObjectStub stub(masm->isolate()); - __ CallStub(&stub); - __ mov(ecx, eax); - __ Pop(edi); - __ Pop(eax); - __ SmiUntag(eax); - } - __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset)); - __ bind(&convert_receiver); - __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), ecx); - } - __ bind(&done_convert); - - // ----------- S t a t e ------------- - // -- eax : the number of arguments (not including the receiver) - // -- edx : the shared function info. - // -- edi : the function to call (checked to be a JSFunction) - // -- esi : the function context. 
- // ----------------------------------- - - __ mov(ebx, - FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset)); - __ SmiUntag(ebx); - ParameterCount actual(eax); - ParameterCount expected(ebx); - __ InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset), expected, - actual, JUMP_FUNCTION, NullCallWrapper()); -} - - -// static -void Builtins::Generate_Call(MacroAssembler* masm) { - // ----------- S t a t e ------------- - // -- eax : the number of arguments (not including the receiver) - // -- edi : the target to call (can be any Object). - // ----------------------------------- - - Label non_smi, non_function; - __ JumpIfSmi(edi, &non_function); - __ bind(&non_smi); - __ CmpObjectType(edi, JS_FUNCTION_TYPE, edx); - __ j(equal, masm->isolate()->builtins()->CallFunction(), - RelocInfo::CODE_TARGET); - __ CmpInstanceType(edx, JS_FUNCTION_PROXY_TYPE); - __ j(not_equal, &non_function); - - // 1. Call to function proxy. - // TODO(neis): This doesn't match the ES6 spec for [[Call]] on proxies. - __ mov(edi, FieldOperand(edi, JSFunctionProxy::kCallTrapOffset)); - __ AssertNotSmi(edi); - __ jmp(&non_smi); - - // 2. Call to something else, which might have a [[Call]] internal method (if - // not we raise an exception). - __ bind(&non_function); - // TODO(bmeurer): I wonder why we prefer to have slow API calls? This could - // be awesome instead; i.e. a trivial improvement would be to call into the - // runtime and just deal with the API function there instead of returning a - // delegate from a runtime call that just jumps back to the runtime once - // called. Or, bonus points, call directly into the C API function here, as - // we do in some Crankshaft fast cases. - // Overwrite the original receiver with the (original) target. - __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), edi); - { - // Determine the delegate for the target (if any). 
- FrameScope scope(masm, StackFrame::INTERNAL); - __ SmiTag(eax); - __ Push(eax); - __ Push(edi); - __ CallRuntime(Runtime::kGetFunctionDelegate, 1); - __ mov(edi, eax); - __ Pop(eax); - __ SmiUntag(eax); - } - // The delegate is always a regular function. - __ AssertFunction(edi); - __ Jump(masm->isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET); -} - - void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { // ----------- S t a t e ------------- // -- eax : actual number of arguments diff --git a/src/ia32/code-stubs-ia32.cc b/src/ia32/code-stubs-ia32.cc index 029e8cf23..e32b1155a 100644 --- a/src/ia32/code-stubs-ia32.cc +++ b/src/ia32/code-stubs-ia32.cc @@ -2039,9 +2039,33 @@ static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) { } -static void EmitSlowCase(Isolate* isolate, MacroAssembler* masm, int argc) { - __ Set(eax, argc); - __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); +static void EmitSlowCase(Isolate* isolate, + MacroAssembler* masm, + int argc, + Label* non_function) { + // Check for function proxy. + __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE); + __ j(not_equal, non_function); + __ pop(ecx); + __ push(edi); // put proxy as additional argument under return address + __ push(ecx); + __ Move(eax, Immediate(argc + 1)); + __ Move(ebx, Immediate(0)); + __ GetBuiltinEntry(edx, Context::CALL_FUNCTION_PROXY_BUILTIN_INDEX); + { + Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline(); + __ jmp(adaptor, RelocInfo::CODE_TARGET); + } + + // CALL_NON_FUNCTION expects the non-function callee as receiver (instead + // of the original receiver from the call site). 
+ __ bind(non_function); + __ mov(Operand(esp, (argc + 1) * kPointerSize), edi); + __ Move(eax, Immediate(argc)); + __ Move(ebx, Immediate(0)); + __ GetBuiltinEntry(edx, Context::CALL_NON_FUNCTION_BUILTIN_INDEX); + Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline(); + __ jmp(adaptor, RelocInfo::CODE_TARGET); } @@ -2062,11 +2086,11 @@ static void CallFunctionNoFeedback(MacroAssembler* masm, int argc, bool needs_checks, bool call_as_method) { // edi : the function to call - Label slow, wrap, cont; + Label slow, non_function, wrap, cont; if (needs_checks) { // Check that the function really is a JavaScript function. - __ JumpIfSmi(edi, &slow); + __ JumpIfSmi(edi, &non_function); // Goto slow case if we do not have a function. __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx); @@ -2101,7 +2125,8 @@ static void CallFunctionNoFeedback(MacroAssembler* masm, if (needs_checks) { // Slow-case: Non-function called. __ bind(&slow); - EmitSlowCase(masm->isolate(), masm, argc); + // (non_function is bound in EmitSlowCase) + EmitSlowCase(masm->isolate(), masm, argc, &non_function); } if (call_as_method) { @@ -2245,8 +2270,13 @@ void CallIC_ArrayStub::Generate(MacroAssembler* masm) { GenerateMiss(masm); // The slow case, we need this no matter what to complete a call after a miss. - __ Set(eax, arg_count()); - __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); + CallFunctionNoFeedback(masm, + arg_count(), + true, + CallAsMethod()); + + // Unreachable. 
+ __ int3(); } @@ -2260,7 +2290,7 @@ void CallICStub::Generate(MacroAssembler* masm) { const int generic_offset = FixedArray::OffsetOfElementAt(TypeFeedbackVector::kGenericCountIndex); Label extra_checks_or_miss, slow_start; - Label slow, wrap, cont; + Label slow, non_function, wrap, cont; Label have_js_function; int argc = arg_count(); ParameterCount actual(argc); @@ -2313,7 +2343,7 @@ void CallICStub::Generate(MacroAssembler* masm) { __ InvokeFunction(edi, actual, JUMP_FUNCTION, NullCallWrapper()); __ bind(&slow); - EmitSlowCase(isolate, masm, argc); + EmitSlowCase(isolate, masm, argc, &non_function); if (CallAsMethod()) { __ bind(&wrap); @@ -2394,7 +2424,7 @@ void CallICStub::Generate(MacroAssembler* masm) { __ bind(&slow_start); // Check that the function really is a JavaScript function. - __ JumpIfSmi(edi, &slow); + __ JumpIfSmi(edi, &non_function); // Goto slow case if we do not have a function. __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx); diff --git a/src/ia32/macro-assembler-ia32.cc b/src/ia32/macro-assembler-ia32.cc index dac82b447..b05384d05 100644 --- a/src/ia32/macro-assembler-ia32.cc +++ b/src/ia32/macro-assembler-ia32.cc @@ -116,12 +116,6 @@ void MacroAssembler::CompareRoot(const Operand& with, } -void MacroAssembler::PushRoot(Heap::RootListIndex index) { - DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index)); - Push(isolate()->heap()->root_handle(index)); -} - - void MacroAssembler::InNewSpace( Register object, Register scratch, @@ -810,18 +804,6 @@ void MacroAssembler::AssertName(Register object) { } -void MacroAssembler::AssertFunction(Register object) { - if (emit_debug_code()) { - test(object, Immediate(kSmiTagMask)); - Check(not_equal, kOperandIsASmiAndNotAFunction); - Push(object); - CmpObjectType(object, JS_FUNCTION_TYPE, object); - Pop(object); - Check(not_equal, kOperandIsNotAFunction); - } -} - - void MacroAssembler::AssertUndefinedOrAllocationSite(Register object) { if (emit_debug_code()) { Label done_checking; @@ -2115,12 +2097,6 
@@ void MacroAssembler::LoadContext(Register dst, int context_chain_length) { } -void MacroAssembler::LoadGlobalProxy(Register dst) { - mov(dst, GlobalObjectOperand()); - mov(dst, FieldOperand(dst, GlobalObject::kGlobalProxyOffset)); -} - - void MacroAssembler::LoadTransitionedArrayMapConditional( ElementsKind expected_kind, ElementsKind transitioned_kind, diff --git a/src/ia32/macro-assembler-ia32.h b/src/ia32/macro-assembler-ia32.h index f57bdb7e8..dd90650f6 100644 --- a/src/ia32/macro-assembler-ia32.h +++ b/src/ia32/macro-assembler-ia32.h @@ -71,16 +71,6 @@ class MacroAssembler: public Assembler { void Load(Register dst, const Operand& src, Representation r); void Store(Register src, const Operand& dst, Representation r); - // Load a register with a long value as efficiently as possible. - void Set(Register dst, int32_t x) { - if (x == 0) { - xor_(dst, dst); - } else { - mov(dst, Immediate(x)); - } - } - void Set(const Operand& dst, int32_t x) { mov(dst, Immediate(x)); } - // Operations on roots in the root-array. void LoadRoot(Register destination, Heap::RootListIndex index); void StoreRoot(Register source, Register scratch, Heap::RootListIndex index); @@ -89,22 +79,6 @@ class MacroAssembler: public Assembler { // and not in new space). void CompareRoot(Register with, Heap::RootListIndex index); void CompareRoot(const Operand& with, Heap::RootListIndex index); - void PushRoot(Heap::RootListIndex index); - - // Compare the object in a register to a value and jump if they are equal. - void JumpIfRoot(Register with, Heap::RootListIndex index, Label* if_equal, - Label::Distance if_equal_distance = Label::kNear) { - CompareRoot(with, index); - j(equal, if_equal, if_equal_distance); - } - - // Compare the object in a register to a value and jump if they are not equal. 
- void JumpIfNotRoot(Register with, Heap::RootListIndex index, - Label* if_not_equal, - Label::Distance if_not_equal_distance = Label::kNear) { - CompareRoot(with, index); - j(not_equal, if_not_equal, if_not_equal_distance); - } // --------------------------------------------------------------------------- // GC Support @@ -288,9 +262,6 @@ class MacroAssembler: public Assembler { // Find the function context up the context chain. void LoadContext(Register dst, int context_chain_length); - // Load the global proxy from the current context. - void LoadGlobalProxy(Register dst); - // Conditionally load the cached Array transitioned map of type // transitioned_kind from the native context if the map in register // map_in_out is the cached Array map in the native context of @@ -594,9 +565,6 @@ class MacroAssembler: public Assembler { // Abort execution if argument is not a name, enabled via --debug-code. void AssertName(Register object); - // Abort execution if argument is not a JSFunction, enabled via --debug-code. - void AssertFunction(Register object); - // Abort execution if argument is not undefined or an AllocationSite, enabled // via --debug-code. void AssertUndefinedOrAllocationSite(Register object); @@ -835,14 +803,8 @@ class MacroAssembler: public Assembler { void Drop(int element_count); void Call(Label* target) { call(target); } - void Call(Handle<Code> target, RelocInfo::Mode rmode) { call(target, rmode); } - void Jump(Handle<Code> target, RelocInfo::Mode rmode) { jmp(target, rmode); } void Push(Register src) { push(src); } - void Push(const Operand& src) { push(src); } - void Push(Immediate value) { push(value); } void Pop(Register dst) { pop(dst); } - void PushReturnAddressFrom(Register src) { push(src); } - void PopReturnAddressTo(Register dst) { pop(dst); } // Non-SSE2 instructions. 
void Pextrd(Register dst, XMMRegister src, int8_t imm8); diff --git a/src/mips/builtins-mips.cc b/src/mips/builtins-mips.cc index 6e51b0764..bd8c85285 100644 --- a/src/mips/builtins-mips.cc +++ b/src/mips/builtins-mips.cc @@ -1252,31 +1252,129 @@ void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) { } -// static void Builtins::Generate_FunctionCall(MacroAssembler* masm) { // 1. Make sure we have at least one argument. // a0: actual number of arguments - { - Label done; + { Label done; __ Branch(&done, ne, a0, Operand(zero_reg)); - __ PushRoot(Heap::kUndefinedValueRootIndex); + __ LoadRoot(t2, Heap::kUndefinedValueRootIndex); + __ push(t2); __ Addu(a0, a0, Operand(1)); __ bind(&done); } - // 2. Get the function to call (passed as receiver) from the stack. + // 2. Get the function to call (passed as receiver) from the stack, check + // if it is a function. // a0: actual number of arguments + Label slow, non_function; __ sll(at, a0, kPointerSizeLog2); __ addu(at, sp, at); __ lw(a1, MemOperand(at)); + __ JumpIfSmi(a1, &non_function); + __ GetObjectType(a1, a2, a2); + __ Branch(&slow, ne, a2, Operand(JS_FUNCTION_TYPE)); + + // 3a. Patch the first argument if necessary when calling a function. + // a0: actual number of arguments + // a1: function + Label shift_arguments; + __ li(t0, Operand(0, RelocInfo::NONE32)); // Indicate regular JS_FUNCTION. + { Label convert_to_object, use_global_proxy, patch_receiver; + // Change context eagerly in case we need the global receiver. + __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); + + // Do not transform the receiver for strict mode functions. 
+ __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); + __ lw(a3, FieldMemOperand(a2, SharedFunctionInfo::kCompilerHintsOffset)); + __ And(t3, a3, Operand(1 << (SharedFunctionInfo::kStrictModeFunction + + kSmiTagSize))); + __ Branch(&shift_arguments, ne, t3, Operand(zero_reg)); + + // Do not transform the receiver for native (Compilerhints already in a3). + __ And(t3, a3, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize))); + __ Branch(&shift_arguments, ne, t3, Operand(zero_reg)); + + // Compute the receiver in sloppy mode. + // Load first argument in a2. a2 = -kPointerSize(sp + n_args << 2). + __ sll(at, a0, kPointerSizeLog2); + __ addu(a2, sp, at); + __ lw(a2, MemOperand(a2, -kPointerSize)); + // a0: actual number of arguments + // a1: function + // a2: first argument + __ JumpIfSmi(a2, &convert_to_object, t2); + + __ LoadRoot(a3, Heap::kUndefinedValueRootIndex); + __ Branch(&use_global_proxy, eq, a2, Operand(a3)); + __ LoadRoot(a3, Heap::kNullValueRootIndex); + __ Branch(&use_global_proxy, eq, a2, Operand(a3)); - // 3. Shift arguments and return address one slot down on the stack + STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); + __ GetObjectType(a2, a3, a3); + __ Branch(&shift_arguments, ge, a3, Operand(FIRST_SPEC_OBJECT_TYPE)); + + __ bind(&convert_to_object); + // Enter an internal frame in order to preserve argument count. + { + FrameScope scope(masm, StackFrame::INTERNAL); + __ sll(a0, a0, kSmiTagSize); // Smi tagged. + __ push(a0); + __ mov(a0, a2); + ToObjectStub stub(masm->isolate()); + __ CallStub(&stub); + __ mov(a2, v0); + + __ pop(a0); + __ sra(a0, a0, kSmiTagSize); // Un-tag. + // Leave internal frame. + } + + // Restore the function to a1, and the flag to t0. + __ sll(at, a0, kPointerSizeLog2); + __ addu(at, sp, at); + __ lw(a1, MemOperand(at)); + __ Branch(USE_DELAY_SLOT, &patch_receiver); + __ li(t0, Operand(0, RelocInfo::NONE32)); // In delay slot. 
+ + __ bind(&use_global_proxy); + __ lw(a2, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); + __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalProxyOffset)); + + __ bind(&patch_receiver); + __ sll(at, a0, kPointerSizeLog2); + __ addu(a3, sp, at); + __ sw(a2, MemOperand(a3, -kPointerSize)); + + __ Branch(&shift_arguments); + } + + // 3b. Check for function proxy. + __ bind(&slow); + __ li(t0, Operand(1, RelocInfo::NONE32)); // Indicate function proxy. + __ Branch(&shift_arguments, eq, a2, Operand(JS_FUNCTION_PROXY_TYPE)); + + __ bind(&non_function); + __ li(t0, Operand(2, RelocInfo::NONE32)); // Indicate non-function. + + // 3c. Patch the first argument when calling a non-function. The + // CALL_NON_FUNCTION builtin expects the non-function callee as + // receiver, so overwrite the first argument which will ultimately + // become the receiver. + // a0: actual number of arguments + // a1: function + // t0: call type (0: JS function, 1: function proxy, 2: non-function) + __ sll(at, a0, kPointerSizeLog2); + __ addu(a2, sp, at); + __ sw(a1, MemOperand(a2, -kPointerSize)); + + // 4. Shift arguments and return address one slot down on the stack // (overwriting the original receiver). Adjust argument count to make // the original first argument the new receiver. // a0: actual number of arguments // a1: function - { - Label loop; + // t0: call type (0: JS function, 1: function proxy, 2: non-function) + __ bind(&shift_arguments); + { Label loop; // Calculate the copy start address (destination). Copy end address is sp. __ sll(at, a0, kPointerSizeLog2); __ addu(a2, sp, at); @@ -1292,8 +1390,46 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) { __ Pop(); } - // 4. Call the callable. - __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); + // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin, + // or a function proxy via CALL_FUNCTION_PROXY. 
+ // a0: actual number of arguments + // a1: function + // t0: call type (0: JS function, 1: function proxy, 2: non-function) + { Label function, non_proxy; + __ Branch(&function, eq, t0, Operand(zero_reg)); + // Expected number of arguments is 0 for CALL_NON_FUNCTION. + __ mov(a2, zero_reg); + __ Branch(&non_proxy, ne, t0, Operand(1)); + + __ push(a1); // Re-add proxy object as additional argument. + __ Addu(a0, a0, Operand(1)); + __ GetBuiltinFunction(a1, Context::CALL_FUNCTION_PROXY_BUILTIN_INDEX); + __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), + RelocInfo::CODE_TARGET); + + __ bind(&non_proxy); + __ GetBuiltinFunction(a1, Context::CALL_NON_FUNCTION_BUILTIN_INDEX); + __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), + RelocInfo::CODE_TARGET); + __ bind(&function); + } + + // 5b. Get the code to call from the function and check that the number of + // expected arguments matches what we're providing. If so, jump + // (tail-call) to the code in register edx without checking arguments. + // a0: actual number of arguments + // a1: function + __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); + __ lw(a2, + FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset)); + __ sra(a2, a2, kSmiTagSize); + // Check formal and actual parameter counts. + __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), + RelocInfo::CODE_TARGET, ne, a2, Operand(a0)); + + __ lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); + ParameterCount expected(0); + __ InvokeCode(a3, expected, expected, JUMP_FUNCTION, NullCallWrapper()); } @@ -1357,8 +1493,9 @@ static void Generate_ApplyHelper(MacroAssembler* masm, bool targetIsArgument) { const int kFunctionOffset = kReceiverOffset + kPointerSize; __ lw(a0, MemOperand(fp, kFunctionOffset)); // Get the function. - __ lw(a1, MemOperand(fp, kArgumentsOffset)); // Get the args array. 
- __ Push(a0, a1); + __ push(a0); + __ lw(a0, MemOperand(fp, kArgumentsOffset)); // Get the args array. + __ push(a0); // Returns (in v0) number of arguments to copy to stack as Smi. if (targetIsArgument) { __ InvokeBuiltin(Context::REFLECT_APPLY_PREPARE_BUILTIN_INDEX, @@ -1376,18 +1513,89 @@ static void Generate_ApplyHelper(MacroAssembler* masm, bool targetIsArgument) { const int kLimitOffset = StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize); __ mov(a1, zero_reg); - __ lw(a2, MemOperand(fp, kReceiverOffset)); - __ Push(v0, a1, a2); // limit, initial index and receiver. + __ Push(v0, a1); // Limit and initial index. + + // Get the receiver. + __ lw(a0, MemOperand(fp, kReceiverOffset)); + + // Check that the function is a JS function (otherwise it must be a proxy). + Label push_receiver; + __ lw(a1, MemOperand(fp, kFunctionOffset)); + __ GetObjectType(a1, a2, a2); + __ Branch(&push_receiver, ne, a2, Operand(JS_FUNCTION_TYPE)); + + // Change context eagerly to get the right global object if necessary. + __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); + // Load the shared function info while the function is still in a1. + __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); + + // Compute the receiver. + // Do not transform the receiver for strict mode functions. + Label call_to_object, use_global_proxy; + __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCompilerHintsOffset)); + __ And(t3, a2, Operand(1 << (SharedFunctionInfo::kStrictModeFunction + + kSmiTagSize))); + __ Branch(&push_receiver, ne, t3, Operand(zero_reg)); + + // Do not transform the receiver for native (Compilerhints already in a2). + __ And(t3, a2, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize))); + __ Branch(&push_receiver, ne, t3, Operand(zero_reg)); + + // Compute the receiver in sloppy mode. 
+ __ JumpIfSmi(a0, &call_to_object); + __ LoadRoot(a1, Heap::kNullValueRootIndex); + __ Branch(&use_global_proxy, eq, a0, Operand(a1)); + __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); + __ Branch(&use_global_proxy, eq, a0, Operand(a2)); + + // Check if the receiver is already a JavaScript object. + // a0: receiver + STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); + __ GetObjectType(a0, a1, a1); + __ Branch(&push_receiver, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE)); + + // Convert the receiver to a regular object. + // a0: receiver + __ bind(&call_to_object); + ToObjectStub stub(masm->isolate()); + __ CallStub(&stub); + __ mov(a0, v0); // Put object in a0 to match other paths to push_receiver. + __ Branch(&push_receiver); + + __ bind(&use_global_proxy); + __ lw(a0, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); + __ lw(a0, FieldMemOperand(a0, GlobalObject::kGlobalProxyOffset)); + + // Push the receiver. + // a0: receiver + __ bind(&push_receiver); + __ push(a0); // Copy all arguments from the array to the stack. - Generate_PushAppliedArguments(masm, kArgumentsOffset, kIndexOffset, - kLimitOffset); + Generate_PushAppliedArguments( + masm, kArgumentsOffset, kIndexOffset, kLimitOffset); - // Call the callable. - // TODO(bmeurer): This should be a tail call according to ES6. + // Call the function. + Label call_proxy; + ParameterCount actual(a0); __ lw(a1, MemOperand(fp, kFunctionOffset)); - __ Call(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); + __ GetObjectType(a1, a2, a2); + __ Branch(&call_proxy, ne, a2, Operand(JS_FUNCTION_TYPE)); + + __ InvokeFunction(a1, actual, CALL_FUNCTION, NullCallWrapper()); + + frame_scope.GenerateLeaveFrame(); + __ Ret(USE_DELAY_SLOT); + __ Addu(sp, sp, Operand(kStackSize * kPointerSize)); // In delay slot. + // Call the function proxy. + __ bind(&call_proxy); + __ push(a1); // Add function proxy as last argument. 
+ __ Addu(a0, a0, Operand(1)); + __ li(a2, Operand(0, RelocInfo::NONE32)); + __ GetBuiltinFunction(a1, Context::CALL_FUNCTION_PROXY_BUILTIN_INDEX); + __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), + RelocInfo::CODE_TARGET); // Tear down the internal frame and remove function, receiver and args. } @@ -1523,147 +1731,6 @@ static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) { } -// static -void Builtins::Generate_CallFunction(MacroAssembler* masm) { - // ----------- S t a t e ------------- - // -- a0 : the number of arguments (not including the receiver) - // -- a1 : the function to call (checked to be a JSFunction) - // ----------------------------------- - - Label convert, convert_global_proxy, convert_to_object, done_convert; - __ AssertFunction(a1); - // TODO(bmeurer): Throw a TypeError if function's [[FunctionKind]] internal - // slot is "classConstructor". - // Enter the context of the function; ToObject has to run in the function - // context, and we also need to take the global proxy from the function - // context in case of conversion. - // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList) - STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset == - SharedFunctionInfo::kStrictModeByteOffset); - __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); - __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); - // We need to convert the receiver for non-native sloppy mode functions. 
- __ lbu(a3, FieldMemOperand(a2, SharedFunctionInfo::kNativeByteOffset)); - __ And(at, a3, Operand((1 << SharedFunctionInfo::kNativeBitWithinByte) | - (1 << SharedFunctionInfo::kStrictModeBitWithinByte))); - __ Branch(&done_convert, ne, at, Operand(zero_reg)); - { - __ sll(at, a0, kPointerSizeLog2); - __ addu(at, sp, at); - __ lw(a3, MemOperand(at)); - - // ----------- S t a t e ------------- - // -- a0 : the number of arguments (not including the receiver) - // -- a1 : the function to call (checked to be a JSFunction) - // -- a2 : the shared function info. - // -- a3 : the receiver - // -- cp : the function context. - // ----------------------------------- - - Label convert_receiver; - __ JumpIfSmi(a3, &convert_to_object); - STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE); - __ GetObjectType(a3, t0, t0); - __ Branch(&done_convert, hs, t0, Operand(FIRST_JS_RECEIVER_TYPE)); - __ JumpIfRoot(a3, Heap::kUndefinedValueRootIndex, &convert_global_proxy); - __ JumpIfNotRoot(a3, Heap::kNullValueRootIndex, &convert_to_object); - __ bind(&convert_global_proxy); - { - // Patch receiver to global proxy. - __ LoadGlobalProxy(a3); - } - __ Branch(&convert_receiver); - __ bind(&convert_to_object); - { - // Convert receiver using ToObject. - // TODO(bmeurer): Inline the allocation here to avoid building the frame - // in the fast case? (fall back to AllocateInNewSpace?) - FrameScope scope(masm, StackFrame::INTERNAL); - __ sll(a0, a0, kSmiTagSize); // Smi tagged. - __ Push(a0, a1); - __ mov(a0, a3); - ToObjectStub stub(masm->isolate()); - __ CallStub(&stub); - __ mov(a3, v0); - __ Pop(a0, a1); - __ sra(a0, a0, kSmiTagSize); // Un-tag. 
- } - __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); - __ bind(&convert_receiver); - __ sll(at, a0, kPointerSizeLog2); - __ addu(at, sp, at); - __ sw(a3, MemOperand(at)); - } - __ bind(&done_convert); - - // ----------- S t a t e ------------- - // -- a0 : the number of arguments (not including the receiver) - // -- a1 : the function to call (checked to be a JSFunction) - // -- a2 : the shared function info. - // -- cp : the function context. - // ----------------------------------- - - __ lw(a2, - FieldMemOperand(a2, SharedFunctionInfo::kFormalParameterCountOffset)); - __ sra(a2, a2, kSmiTagSize); // Un-tag. - __ lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); - ParameterCount actual(a0); - ParameterCount expected(a2); - __ InvokeCode(a3, expected, actual, JUMP_FUNCTION, NullCallWrapper()); -} - - -// static -void Builtins::Generate_Call(MacroAssembler* masm) { - // ----------- S t a t e ------------- - // -- a0 : the number of arguments (not including the receiver) - // -- a1 : the target to call (can be any Object). - // ----------------------------------- - - Label non_smi, non_function; - __ JumpIfSmi(a1, &non_function); - __ bind(&non_smi); - __ GetObjectType(a1, a2, a2); - __ Jump(masm->isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET, - eq, a2, Operand(JS_FUNCTION_TYPE)); - __ Branch(&non_function, ne, a2, Operand(JS_FUNCTION_PROXY_TYPE)); - - - // 1. Call to function proxy. - // TODO(neis): This doesn't match the ES6 spec for [[Call]] on proxies. - __ lw(a1, FieldMemOperand(a1, JSFunctionProxy::kCallTrapOffset)); - __ AssertNotSmi(a1); - __ Branch(&non_smi); - - // 2. Call to something else, which might have a [[Call]] internal method (if - // not we raise an exception). - __ bind(&non_function); - // TODO(bmeurer): I wonder why we prefer to have slow API calls? This could - // be awesome instead; i.e. 
a trivial improvement would be to call into the - // runtime and just deal with the API function there instead of returning a - // delegate from a runtime call that just jumps back to the runtime once - // called. Or, bonus points, call directly into the C API function here, as - // we do in some Crankshaft fast cases. - // Overwrite the original receiver with the (original) target. - __ sll(at, a0, kPointerSizeLog2); - __ addu(at, sp, at); - __ sw(a1, MemOperand(at)); - { - // Determine the delegate for the target (if any). - FrameScope scope(masm, StackFrame::INTERNAL); - __ sll(a0, a0, kSmiTagSize); // Smi tagged. - __ Push(a0, a1); - __ CallRuntime(Runtime::kGetFunctionDelegate, 1); - __ mov(a1, v0); - __ Pop(a0); - __ sra(a0, a0, kSmiTagSize); // Un-tag. - } - // The delegate is always a regular function. - __ AssertFunction(a1); - __ Jump(masm->isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET); -} - - void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { // State setup as expected by MacroAssembler::InvokePrologue. // ----------- S t a t e ------------- diff --git a/src/mips/code-stubs-mips.cc b/src/mips/code-stubs-mips.cc index 3053cb769..11b14be66 100644 --- a/src/mips/code-stubs-mips.cc +++ b/src/mips/code-stubs-mips.cc @@ -2524,9 +2524,30 @@ static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) { } -static void EmitSlowCase(MacroAssembler* masm, int argc) { - __ li(a0, Operand(argc)); - __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); +static void EmitSlowCase(MacroAssembler* masm, + int argc, + Label* non_function) { + // Check for function proxy. 
+ __ Branch(non_function, ne, t0, Operand(JS_FUNCTION_PROXY_TYPE)); + __ push(a1); // put proxy as additional argument + __ li(a0, Operand(argc + 1, RelocInfo::NONE32)); + __ mov(a2, zero_reg); + __ GetBuiltinFunction(a1, Context::CALL_FUNCTION_PROXY_BUILTIN_INDEX); + { + Handle<Code> adaptor = + masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); + __ Jump(adaptor, RelocInfo::CODE_TARGET); + } + + // CALL_NON_FUNCTION expects the non-function callee as receiver (instead + // of the original receiver from the call site). + __ bind(non_function); + __ sw(a1, MemOperand(sp, argc * kPointerSize)); + __ li(a0, Operand(argc)); // Set up the number of arguments. + __ mov(a2, zero_reg); + __ GetBuiltinFunction(a1, Context::CALL_NON_FUNCTION_BUILTIN_INDEX); + __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), + RelocInfo::CODE_TARGET); } @@ -2548,12 +2569,12 @@ static void CallFunctionNoFeedback(MacroAssembler* masm, int argc, bool needs_checks, bool call_as_method) { // a1 : the function to call - Label slow, wrap, cont; + Label slow, non_function, wrap, cont; if (needs_checks) { // Check that the function is really a JavaScript function. // a1: pushed function (to be verified) - __ JumpIfSmi(a1, &slow); + __ JumpIfSmi(a1, &non_function); // Goto slow case if we do not have a function. __ GetObjectType(a1, t0, t0); @@ -2588,7 +2609,7 @@ static void CallFunctionNoFeedback(MacroAssembler* masm, if (needs_checks) { // Slow-case: Non-function called. __ bind(&slow); - EmitSlowCase(masm, argc); + EmitSlowCase(masm, argc, &non_function); } if (call_as_method) { @@ -2722,8 +2743,13 @@ void CallIC_ArrayStub::Generate(MacroAssembler* masm) { GenerateMiss(masm); // The slow case, we need this no matter what to complete a call after a miss. - __ li(a0, Operand(arg_count())); - __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); + CallFunctionNoFeedback(masm, + arg_count(), + true, + CallAsMethod()); + + // Unreachable. 
+ __ stop("Unexpected code address"); } @@ -2736,7 +2762,7 @@ void CallICStub::Generate(MacroAssembler* masm) { const int generic_offset = FixedArray::OffsetOfElementAt(TypeFeedbackVector::kGenericCountIndex); Label extra_checks_or_miss, slow_start; - Label slow, wrap, cont; + Label slow, non_function, wrap, cont; Label have_js_function; int argc = arg_count(); ParameterCount actual(argc); @@ -2790,7 +2816,7 @@ void CallICStub::Generate(MacroAssembler* masm) { __ InvokeFunction(a1, actual, JUMP_FUNCTION, NullCallWrapper()); __ bind(&slow); - EmitSlowCase(masm, argc); + EmitSlowCase(masm, argc, &non_function); if (CallAsMethod()) { __ bind(&wrap); @@ -2878,7 +2904,7 @@ void CallICStub::Generate(MacroAssembler* masm) { __ bind(&slow_start); // Check that the function is really a JavaScript function. // r1: pushed function (to be verified) - __ JumpIfSmi(a1, &slow); + __ JumpIfSmi(a1, &non_function); // Goto slow case if we do not have a function. __ GetObjectType(a1, t0, t0); diff --git a/src/mips/macro-assembler-mips.cc b/src/mips/macro-assembler-mips.cc index d81016374..9ba814ae2 100644 --- a/src/mips/macro-assembler-mips.cc +++ b/src/mips/macro-assembler-mips.cc @@ -1,4 +1,3 @@ - // Copyright 2012 the V8 project authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. 
@@ -4744,12 +4743,6 @@ void MacroAssembler::LoadContext(Register dst, int context_chain_length) { } -void MacroAssembler::LoadGlobalProxy(Register dst) { - lw(dst, GlobalObjectOperand()); - lw(dst, FieldMemOperand(dst, GlobalObject::kGlobalProxyOffset)); -} - - void MacroAssembler::LoadTransitionedArrayMapConditional( ElementsKind expected_kind, ElementsKind transitioned_kind, @@ -5175,19 +5168,6 @@ void MacroAssembler::AssertName(Register object) { } -void MacroAssembler::AssertFunction(Register object) { - if (emit_debug_code()) { - STATIC_ASSERT(kSmiTag == 0); - SmiTst(object, t0); - Check(ne, kOperandIsASmiAndNotAFunction, t0, Operand(zero_reg)); - push(object); - GetObjectType(object, object, object); - pop(object); - Check(ne, kOperandIsNotAFunction, object, Operand(JS_FUNCTION_TYPE)); - } -} - - void MacroAssembler::AssertUndefinedOrAllocationSite(Register object, Register scratch) { if (emit_debug_code()) { diff --git a/src/mips/macro-assembler-mips.h b/src/mips/macro-assembler-mips.h index 96eb9b410..5e1d2c625 100644 --- a/src/mips/macro-assembler-mips.h +++ b/src/mips/macro-assembler-mips.h @@ -292,24 +292,6 @@ class MacroAssembler: public Assembler { void Load(Register dst, const MemOperand& src, Representation r); void Store(Register src, const MemOperand& dst, Representation r); - void PushRoot(Heap::RootListIndex index) { - LoadRoot(at, index); - Push(at); - } - - // Compare the object in a register to a value and jump if they are equal. - void JumpIfRoot(Register with, Heap::RootListIndex index, Label* if_equal) { - LoadRoot(at, index); - Branch(if_equal, eq, with, Operand(at)); - } - - // Compare the object in a register to a value and jump if they are not equal. - void JumpIfNotRoot(Register with, Heap::RootListIndex index, - Label* if_not_equal) { - LoadRoot(at, index); - Branch(if_not_equal, ne, with, Operand(at)); - } - // Load an object from the root table. 
void LoadRoot(Register destination, Heap::RootListIndex index); @@ -941,9 +923,6 @@ class MacroAssembler: public Assembler { void LoadContext(Register dst, int context_chain_length); - // Load the global proxy from the current context. - void LoadGlobalProxy(Register dst); - // Conditionally load the cached Array transitioned map of type // transitioned_kind from the native context if the map in register // map_in_out is the cached Array map in the native context of @@ -1485,9 +1464,6 @@ const Operand& rt = Operand(zero_reg), BranchDelaySlot bd = PROTECT // Abort execution if argument is not a name, enabled via --debug-code. void AssertName(Register object); - // Abort execution if argument is not a JSFunction, enabled via --debug-code. - void AssertFunction(Register object); - // Abort execution if argument is not undefined or an AllocationSite, enabled // via --debug-code. void AssertUndefinedOrAllocationSite(Register object, Register scratch); diff --git a/src/mips64/builtins-mips64.cc b/src/mips64/builtins-mips64.cc index 655d5904d..3c0a32667 100644 --- a/src/mips64/builtins-mips64.cc +++ b/src/mips64/builtins-mips64.cc @@ -1249,31 +1249,128 @@ void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) { } -// static void Builtins::Generate_FunctionCall(MacroAssembler* masm) { // 1. Make sure we have at least one argument. // a0: actual number of arguments - { - Label done; + { Label done; __ Branch(&done, ne, a0, Operand(zero_reg)); - __ PushRoot(Heap::kUndefinedValueRootIndex); + __ LoadRoot(a6, Heap::kUndefinedValueRootIndex); + __ push(a6); __ Daddu(a0, a0, Operand(1)); __ bind(&done); } - // 2. Get the function to call (passed as receiver) from the stack. + // 2. Get the function to call (passed as receiver) from the stack, check + // if it is a function. 
// a0: actual number of arguments + Label slow, non_function; __ dsll(at, a0, kPointerSizeLog2); __ daddu(at, sp, at); __ ld(a1, MemOperand(at)); + __ JumpIfSmi(a1, &non_function); + __ GetObjectType(a1, a2, a2); + __ Branch(&slow, ne, a2, Operand(JS_FUNCTION_TYPE)); + + // 3a. Patch the first argument if necessary when calling a function. + // a0: actual number of arguments + // a1: function + Label shift_arguments; + __ li(a4, Operand(0, RelocInfo::NONE32)); // Indicate regular JS_FUNCTION. + { Label convert_to_object, use_global_proxy, patch_receiver; + // Change context eagerly in case we need the global receiver. + __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); + + // Do not transform the receiver for strict mode functions. + __ ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); + __ lbu(a3, FieldMemOperand(a2, SharedFunctionInfo::kStrictModeByteOffset)); + __ And(a7, a3, Operand(1 << SharedFunctionInfo::kStrictModeBitWithinByte)); + __ Branch(&shift_arguments, ne, a7, Operand(zero_reg)); + + // Do not transform the receiver for native (Compilerhints already in a3). + __ lbu(a3, FieldMemOperand(a2, SharedFunctionInfo::kNativeByteOffset)); + __ And(a7, a3, Operand(1 << SharedFunctionInfo::kNativeBitWithinByte)); + __ Branch(&shift_arguments, ne, a7, Operand(zero_reg)); + + // Compute the receiver in sloppy mode. + // Load first argument in a2. a2 = -kPointerSize(sp + n_args << 2). 
+ __ dsll(at, a0, kPointerSizeLog2); + __ daddu(a2, sp, at); + __ ld(a2, MemOperand(a2, -kPointerSize)); + // a0: actual number of arguments + // a1: function + // a2: first argument + __ JumpIfSmi(a2, &convert_to_object, a6); + + __ LoadRoot(a3, Heap::kUndefinedValueRootIndex); + __ Branch(&use_global_proxy, eq, a2, Operand(a3)); + __ LoadRoot(a3, Heap::kNullValueRootIndex); + __ Branch(&use_global_proxy, eq, a2, Operand(a3)); + + STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); + __ GetObjectType(a2, a3, a3); + __ Branch(&shift_arguments, ge, a3, Operand(FIRST_SPEC_OBJECT_TYPE)); + + __ bind(&convert_to_object); + // Enter an internal frame in order to preserve argument count. + { + FrameScope scope(masm, StackFrame::INTERNAL); + __ SmiTag(a0); + __ Push(a0); + __ mov(a0, a2); + ToObjectStub stub(masm->isolate()); + __ CallStub(&stub); + __ mov(a2, v0); + + __ pop(a0); + __ SmiUntag(a0); + // Leave internal frame. + } + // Restore the function to a1, and the flag to a4. + __ dsll(at, a0, kPointerSizeLog2); + __ daddu(at, sp, at); + __ ld(a1, MemOperand(at)); + __ Branch(USE_DELAY_SLOT, &patch_receiver); + __ li(a4, Operand(0, RelocInfo::NONE32)); + + __ bind(&use_global_proxy); + __ ld(a2, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); + __ ld(a2, FieldMemOperand(a2, GlobalObject::kGlobalProxyOffset)); + + __ bind(&patch_receiver); + __ dsll(at, a0, kPointerSizeLog2); + __ daddu(a3, sp, at); + __ sd(a2, MemOperand(a3, -kPointerSize)); + + __ Branch(&shift_arguments); + } + + // 3b. Check for function proxy. + __ bind(&slow); + __ li(a4, Operand(1, RelocInfo::NONE32)); // Indicate function proxy. + __ Branch(&shift_arguments, eq, a2, Operand(JS_FUNCTION_PROXY_TYPE)); - // 3. Shift arguments and return address one slot down on the stack + __ bind(&non_function); + __ li(a4, Operand(2, RelocInfo::NONE32)); // Indicate non-function. + + // 3c. Patch the first argument when calling a non-function. 
The + // CALL_NON_FUNCTION builtin expects the non-function callee as + // receiver, so overwrite the first argument which will ultimately + // become the receiver. + // a0: actual number of arguments + // a1: function + // a4: call type (0: JS function, 1: function proxy, 2: non-function) + __ dsll(at, a0, kPointerSizeLog2); + __ daddu(a2, sp, at); + __ sd(a1, MemOperand(a2, -kPointerSize)); + + // 4. Shift arguments and return address one slot down on the stack // (overwriting the original receiver). Adjust argument count to make // the original first argument the new receiver. // a0: actual number of arguments // a1: function - { - Label loop; + // a4: call type (0: JS function, 1: function proxy, 2: non-function) + __ bind(&shift_arguments); + { Label loop; // Calculate the copy start address (destination). Copy end address is sp. __ dsll(at, a0, kPointerSizeLog2); __ daddu(a2, sp, at); @@ -1289,8 +1386,47 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) { __ Pop(); } - // 4. Call the callable. - __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); + // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin, + // or a function proxy via CALL_FUNCTION_PROXY. + // a0: actual number of arguments + // a1: function + // a4: call type (0: JS function, 1: function proxy, 2: non-function) + { Label function, non_proxy; + __ Branch(&function, eq, a4, Operand(zero_reg)); + // Expected number of arguments is 0 for CALL_NON_FUNCTION. + __ mov(a2, zero_reg); + __ Branch(&non_proxy, ne, a4, Operand(1)); + + __ push(a1); // Re-add proxy object as additional argument. 
+ __ Daddu(a0, a0, Operand(1)); + __ GetBuiltinFunction(a1, Context::CALL_FUNCTION_PROXY_BUILTIN_INDEX); + __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), + RelocInfo::CODE_TARGET); + + __ bind(&non_proxy); + __ GetBuiltinFunction(a1, Context::CALL_NON_FUNCTION_BUILTIN_INDEX); + __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), + RelocInfo::CODE_TARGET); + __ bind(&function); + } + + // 5b. Get the code to call from the function and check that the number of + // expected arguments matches what we're providing. If so, jump + // (tail-call) to the code in register edx without checking arguments. + // a0: actual number of arguments + // a1: function + __ ld(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); + // The argument count is stored as int32_t on 64-bit platforms. + // TODO(plind): Smi on 32-bit platforms. + __ lw(a2, + FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset)); + // Check formal and actual parameter counts. + __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), + RelocInfo::CODE_TARGET, ne, a2, Operand(a0)); + + __ ld(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); + ParameterCount expected(0); + __ InvokeCode(a3, expected, expected, JUMP_FUNCTION, NullCallWrapper()); } @@ -1354,8 +1490,9 @@ static void Generate_ApplyHelper(MacroAssembler* masm, bool targetIsArgument) { const int kFunctionOffset = kReceiverOffset + kPointerSize; __ ld(a0, MemOperand(fp, kFunctionOffset)); // Get the function. - __ ld(a1, MemOperand(fp, kArgumentsOffset)); // Get the args array. - __ Push(a0, a1); + __ push(a0); + __ ld(a0, MemOperand(fp, kArgumentsOffset)); // Get the args array. + __ push(a0); // Returns (in v0) number of arguments to copy to stack as Smi. 
if (targetIsArgument) { @@ -1374,18 +1511,89 @@ static void Generate_ApplyHelper(MacroAssembler* masm, bool targetIsArgument) { const int kLimitOffset = StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize); __ mov(a1, zero_reg); - __ ld(a2, MemOperand(fp, kReceiverOffset)); - __ Push(v0, a1, a2); // limit, initial index and receiver. + __ Push(v0, a1); // Limit and initial index. + + // Get the receiver. + __ ld(a0, MemOperand(fp, kReceiverOffset)); + + // Check that the function is a JS function (otherwise it must be a proxy). + Label push_receiver; + __ ld(a1, MemOperand(fp, kFunctionOffset)); + __ GetObjectType(a1, a2, a2); + __ Branch(&push_receiver, ne, a2, Operand(JS_FUNCTION_TYPE)); + + // Change context eagerly to get the right global object if necessary. + __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); + // Load the shared function info while the function is still in a1. + __ ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); + + // Compute the receiver. + // Do not transform the receiver for strict mode functions. + Label call_to_object, use_global_proxy; + __ lbu(a7, FieldMemOperand(a2, SharedFunctionInfo::kStrictModeByteOffset)); + __ And(a7, a7, Operand(1 << SharedFunctionInfo::kStrictModeBitWithinByte)); + __ Branch(&push_receiver, ne, a7, Operand(zero_reg)); + + // Do not transform the receiver for native (Compilerhints already in a2). + __ lbu(a7, FieldMemOperand(a2, SharedFunctionInfo::kNativeByteOffset)); + __ And(a7, a7, Operand(1 << SharedFunctionInfo::kNativeBitWithinByte)); + __ Branch(&push_receiver, ne, a7, Operand(zero_reg)); + + // Compute the receiver in sloppy mode. + __ JumpIfSmi(a0, &call_to_object); + __ LoadRoot(a1, Heap::kNullValueRootIndex); + __ Branch(&use_global_proxy, eq, a0, Operand(a1)); + __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); + __ Branch(&use_global_proxy, eq, a0, Operand(a2)); + + // Check if the receiver is already a JavaScript object. 
+ // a0: receiver + STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); + __ GetObjectType(a0, a1, a1); + __ Branch(&push_receiver, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE)); + + // Convert the receiver to a regular object. + // a0: receiver + __ bind(&call_to_object); + ToObjectStub stub(masm->isolate()); + __ CallStub(&stub); + __ mov(a0, v0); // Put object in a0 to match other paths to push_receiver. + __ Branch(&push_receiver); + + __ bind(&use_global_proxy); + __ ld(a0, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX)); + __ ld(a0, FieldMemOperand(a0, GlobalObject::kGlobalProxyOffset)); + + // Push the receiver. + // a0: receiver + __ bind(&push_receiver); + __ push(a0); // Copy all arguments from the array to the stack. - Generate_PushAppliedArguments(masm, kArgumentsOffset, kIndexOffset, - kLimitOffset); + Generate_PushAppliedArguments( + masm, kArgumentsOffset, kIndexOffset, kLimitOffset); - // Call the callable. - // TODO(bmeurer): This should be a tail call according to ES6. + // Call the function. + Label call_proxy; + ParameterCount actual(a0); __ ld(a1, MemOperand(fp, kFunctionOffset)); - __ Call(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); + __ GetObjectType(a1, a2, a2); + __ Branch(&call_proxy, ne, a2, Operand(JS_FUNCTION_TYPE)); + + __ InvokeFunction(a1, actual, CALL_FUNCTION, NullCallWrapper()); + + frame_scope.GenerateLeaveFrame(); + __ Ret(USE_DELAY_SLOT); + __ Daddu(sp, sp, Operand(kStackSize * kPointerSize)); // In delay slot. + // Call the function proxy. + __ bind(&call_proxy); + __ push(a1); // Add function proxy as last argument. + __ Daddu(a0, a0, Operand(1)); + __ li(a2, Operand(0, RelocInfo::NONE32)); + __ GetBuiltinFunction(a1, Context::CALL_FUNCTION_PROXY_BUILTIN_INDEX); + __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), + RelocInfo::CODE_TARGET); // Tear down the internal frame and remove function, receiver and args. 
} @@ -1522,145 +1730,6 @@ static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) { } -// static -void Builtins::Generate_CallFunction(MacroAssembler* masm) { - // ----------- S t a t e ------------- - // -- a0 : the number of arguments (not including the receiver) - // -- a1 : the function to call (checked to be a JSFunction) - // ----------------------------------- - - Label convert, convert_global_proxy, convert_to_object, done_convert; - __ AssertFunction(a1); - // TODO(bmeurer): Throw a TypeError if function's [[FunctionKind]] internal - // slot is "classConstructor". - // Enter the context of the function; ToObject has to run in the function - // context, and we also need to take the global proxy from the function - // context in case of conversion. - // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList) - STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset == - SharedFunctionInfo::kStrictModeByteOffset); - __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); - __ ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); - // We need to convert the receiver for non-native sloppy mode functions. - __ lbu(a3, FieldMemOperand(a2, SharedFunctionInfo::kNativeByteOffset)); - __ And(at, a3, Operand((1 << SharedFunctionInfo::kNativeBitWithinByte) | - (1 << SharedFunctionInfo::kStrictModeBitWithinByte))); - __ Branch(&done_convert, ne, at, Operand(zero_reg)); - { - __ dsll(at, a0, kPointerSizeLog2); - __ daddu(at, sp, at); - __ ld(a3, MemOperand(at)); - - // ----------- S t a t e ------------- - // -- a0 : the number of arguments (not including the receiver) - // -- a1 : the function to call (checked to be a JSFunction) - // -- a2 : the shared function info. - // -- a3 : the receiver - // -- cp : the function context. 
- // ----------------------------------- - - Label convert_receiver; - __ JumpIfSmi(a3, &convert_to_object); - STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE); - __ GetObjectType(a3, a4, a4); - __ Branch(&done_convert, hs, a4, Operand(FIRST_JS_RECEIVER_TYPE)); - __ JumpIfRoot(a3, Heap::kUndefinedValueRootIndex, &convert_global_proxy); - __ JumpIfNotRoot(a3, Heap::kNullValueRootIndex, &convert_to_object); - __ bind(&convert_global_proxy); - { - // Patch receiver to global proxy. - __ LoadGlobalProxy(a3); - } - __ Branch(&convert_receiver); - __ bind(&convert_to_object); - { - // Convert receiver using ToObject. - // TODO(bmeurer): Inline the allocation here to avoid building the frame - // in the fast case? (fall back to AllocateInNewSpace?) - FrameScope scope(masm, StackFrame::INTERNAL); - __ SmiTag(a0); - __ Push(a0, a1); - __ mov(a0, a3); - ToObjectStub stub(masm->isolate()); - __ CallStub(&stub); - __ mov(a3, v0); - __ Pop(a0, a1); - __ SmiUntag(a0); - } - __ ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); - __ bind(&convert_receiver); - __ dsll(at, a0, kPointerSizeLog2); - __ daddu(at, sp, at); - __ sd(a3, MemOperand(at)); - } - __ bind(&done_convert); - - // ----------- S t a t e ------------- - // -- a0 : the number of arguments (not including the receiver) - // -- a1 : the function to call (checked to be a JSFunction) - // -- a2 : the shared function info. - // -- cp : the function context. - // ----------------------------------- - - __ lw(a2, - FieldMemOperand(a2, SharedFunctionInfo::kFormalParameterCountOffset)); - __ ld(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); - ParameterCount actual(a0); - ParameterCount expected(a2); - __ InvokeCode(a3, expected, actual, JUMP_FUNCTION, NullCallWrapper()); -} - - -// static -void Builtins::Generate_Call(MacroAssembler* masm) { - // ----------- S t a t e ------------- - // -- a0 : the number of arguments (not including the receiver) - // -- a1 : the target to call (can be any Object). 
- // ----------------------------------- - - Label non_smi, non_function; - __ JumpIfSmi(a1, &non_function); - __ bind(&non_smi); - __ GetObjectType(a1, a2, a2); - __ Jump(masm->isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET, - eq, a2, Operand(JS_FUNCTION_TYPE)); - __ Branch(&non_function, ne, a2, Operand(JS_FUNCTION_PROXY_TYPE)); - - // 1. Call to function proxy. - // TODO(neis): This doesn't match the ES6 spec for [[Call]] on proxies. - __ ld(a1, FieldMemOperand(a1, JSFunctionProxy::kCallTrapOffset)); - __ AssertNotSmi(a1); - __ Branch(&non_smi); - - // 2. Call to something else, which might have a [[Call]] internal method (if - // not we raise an exception). - __ bind(&non_function); - // TODO(bmeurer): I wonder why we prefer to have slow API calls? This could - // be awesome instead; i.e. a trivial improvement would be to call into the - // runtime and just deal with the API function there instead of returning a - // delegate from a runtime call that just jumps back to the runtime once - // called. Or, bonus points, call directly into the C API function here, as - // we do in some Crankshaft fast cases. - // Overwrite the original receiver with the (original) target. - __ dsll(at, a0, kPointerSizeLog2); - __ daddu(at, sp, at); - __ sd(a1, MemOperand(at)); - { - // Determine the delegate for the target (if any). - FrameScope scope(masm, StackFrame::INTERNAL); - __ SmiTag(a0); - __ Push(a0, a1); - __ CallRuntime(Runtime::kGetFunctionDelegate, 1); - __ mov(a1, v0); - __ Pop(a0); - __ SmiUntag(a0); - } - // The delegate is always a regular function. - __ AssertFunction(a1); - __ Jump(masm->isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET); -} - - void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { // State setup as expected by MacroAssembler::InvokePrologue. 
// ----------- S t a t e ------------- diff --git a/src/mips64/code-stubs-mips64.cc b/src/mips64/code-stubs-mips64.cc index 6d5b446e6..8360b07f8 100644 --- a/src/mips64/code-stubs-mips64.cc +++ b/src/mips64/code-stubs-mips64.cc @@ -2562,9 +2562,30 @@ static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) { } -static void EmitSlowCase(MacroAssembler* masm, int argc) { - __ li(a0, Operand(argc)); - __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); +static void EmitSlowCase(MacroAssembler* masm, + int argc, + Label* non_function) { + // Check for function proxy. + __ Branch(non_function, ne, a4, Operand(JS_FUNCTION_PROXY_TYPE)); + __ push(a1); // put proxy as additional argument + __ li(a0, Operand(argc + 1, RelocInfo::NONE32)); + __ mov(a2, zero_reg); + __ GetBuiltinFunction(a1, Context::CALL_FUNCTION_PROXY_BUILTIN_INDEX); + { + Handle adaptor = + masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); + __ Jump(adaptor, RelocInfo::CODE_TARGET); + } + + // CALL_NON_FUNCTION expects the non-function callee as receiver (instead + // of the original receiver from the call site). + __ bind(non_function); + __ sd(a1, MemOperand(sp, argc * kPointerSize)); + __ li(a0, Operand(argc)); // Set up the number of arguments. + __ mov(a2, zero_reg); + __ GetBuiltinFunction(a1, Context::CALL_NON_FUNCTION_BUILTIN_INDEX); + __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), + RelocInfo::CODE_TARGET); } @@ -2586,12 +2607,12 @@ static void CallFunctionNoFeedback(MacroAssembler* masm, int argc, bool needs_checks, bool call_as_method) { // a1 : the function to call - Label slow, wrap, cont; + Label slow, non_function, wrap, cont; if (needs_checks) { // Check that the function is really a JavaScript function. // a1: pushed function (to be verified) - __ JumpIfSmi(a1, &slow); + __ JumpIfSmi(a1, &non_function); // Goto slow case if we do not have a function. 
__ GetObjectType(a1, a4, a4); @@ -2625,7 +2646,7 @@ static void CallFunctionNoFeedback(MacroAssembler* masm, if (needs_checks) { // Slow-case: Non-function called. __ bind(&slow); - EmitSlowCase(masm, argc); + EmitSlowCase(masm, argc, &non_function); } if (call_as_method) { @@ -2798,8 +2819,13 @@ void CallIC_ArrayStub::Generate(MacroAssembler* masm) { GenerateMiss(masm); // The slow case, we need this no matter what to complete a call after a miss. - __ li(a0, Operand(arg_count())); - __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); + CallFunctionNoFeedback(masm, + arg_count(), + true, + CallAsMethod()); + + // Unreachable. + __ stop("Unexpected code address"); } @@ -2812,7 +2838,7 @@ void CallICStub::Generate(MacroAssembler* masm) { const int generic_offset = FixedArray::OffsetOfElementAt(TypeFeedbackVector::kGenericCountIndex); Label extra_checks_or_miss, slow_start; - Label slow, wrap, cont; + Label slow, non_function, wrap, cont; Label have_js_function; int argc = arg_count(); ParameterCount actual(argc); @@ -2866,7 +2892,7 @@ void CallICStub::Generate(MacroAssembler* masm) { __ InvokeFunction(a1, actual, JUMP_FUNCTION, NullCallWrapper()); __ bind(&slow); - EmitSlowCase(masm, argc); + EmitSlowCase(masm, argc, &non_function); if (CallAsMethod()) { __ bind(&wrap); @@ -2953,8 +2979,8 @@ void CallICStub::Generate(MacroAssembler* masm) { // the slow case __ bind(&slow_start); // Check that the function is really a JavaScript function. - // a1: pushed function (to be verified) - __ JumpIfSmi(a1, &slow); + // r1: pushed function (to be verified) + __ JumpIfSmi(a1, &non_function); // Goto slow case if we do not have a function. 
__ GetObjectType(a1, a4, a4); diff --git a/src/mips64/macro-assembler-mips64.cc b/src/mips64/macro-assembler-mips64.cc index 4ccfb1910..a379d847a 100644 --- a/src/mips64/macro-assembler-mips64.cc +++ b/src/mips64/macro-assembler-mips64.cc @@ -4874,12 +4874,6 @@ void MacroAssembler::LoadContext(Register dst, int context_chain_length) { } -void MacroAssembler::LoadGlobalProxy(Register dst) { - ld(dst, GlobalObjectOperand()); - ld(dst, FieldMemOperand(dst, GlobalObject::kGlobalProxyOffset)); -} - - void MacroAssembler::LoadTransitionedArrayMapConditional( ElementsKind expected_kind, ElementsKind transitioned_kind, @@ -5373,19 +5367,6 @@ void MacroAssembler::AssertName(Register object) { } -void MacroAssembler::AssertFunction(Register object) { - if (emit_debug_code()) { - STATIC_ASSERT(kSmiTag == 0); - SmiTst(object, t0); - Check(ne, kOperandIsASmiAndNotAFunction, t0, Operand(zero_reg)); - push(object); - GetObjectType(object, object, object); - pop(object); - Check(ne, kOperandIsNotAFunction, object, Operand(JS_FUNCTION_TYPE)); - } -} - - void MacroAssembler::AssertUndefinedOrAllocationSite(Register object, Register scratch) { if (emit_debug_code()) { diff --git a/src/mips64/macro-assembler-mips64.h b/src/mips64/macro-assembler-mips64.h index b374d43b7..17e0857ba 100644 --- a/src/mips64/macro-assembler-mips64.h +++ b/src/mips64/macro-assembler-mips64.h @@ -313,24 +313,6 @@ class MacroAssembler: public Assembler { void Load(Register dst, const MemOperand& src, Representation r); void Store(Register src, const MemOperand& dst, Representation r); - void PushRoot(Heap::RootListIndex index) { - LoadRoot(at, index); - Push(at); - } - - // Compare the object in a register to a value and jump if they are equal. - void JumpIfRoot(Register with, Heap::RootListIndex index, Label* if_equal) { - LoadRoot(at, index); - Branch(if_equal, eq, with, Operand(at)); - } - - // Compare the object in a register to a value and jump if they are not equal. 
- void JumpIfNotRoot(Register with, Heap::RootListIndex index, - Label* if_not_equal) { - LoadRoot(at, index); - Branch(if_not_equal, ne, with, Operand(at)); - } - // Load an object from the root table. void LoadRoot(Register destination, Heap::RootListIndex index); @@ -971,9 +953,6 @@ class MacroAssembler: public Assembler { void LoadContext(Register dst, int context_chain_length); - // Load the global proxy from the current context. - void LoadGlobalProxy(Register dst); - // Conditionally load the cached Array transitioned map of type // transitioned_kind from the native context if the map in register // map_in_out is the cached Array map in the native context of @@ -1578,9 +1557,6 @@ const Operand& rt = Operand(zero_reg), BranchDelaySlot bd = PROTECT // Abort execution if argument is not a name, enabled via --debug-code. void AssertName(Register object); - // Abort execution if argument is not a JSFunction, enabled via --debug-code. - void AssertFunction(Register object); - // Abort execution if argument is not undefined or an AllocationSite, enabled // via --debug-code. 
void AssertUndefinedOrAllocationSite(Register object, Register scratch); diff --git a/src/objects.h b/src/objects.h index dd43f0fe5..4ece658d3 100644 --- a/src/objects.h +++ b/src/objects.h @@ -6705,12 +6705,12 @@ class SharedFunctionInfo: public HeapObject { kAllowLazyCompilation, kAllowLazyCompilationWithoutContext, kOptimizationDisabled, - kNative, kStrictModeFunction, kStrongModeFunction, kUsesArguments, kNeedsHomeObject, kHasDuplicateParameters, + kNative, kForceInline, kBoundFunction, kIsAnonymous, diff --git a/src/runtime.js b/src/runtime.js index 82c668a6e..b2e902288 100644 --- a/src/runtime.js +++ b/src/runtime.js @@ -424,12 +424,26 @@ function SHR_STRONG(y) { ----------------------------- */ +function CALL_NON_FUNCTION() { + var delegate = %GetFunctionDelegate(this); + return %Apply(delegate, this, arguments, 0, %_ArgumentsLength()); +} + + function CALL_NON_FUNCTION_AS_CONSTRUCTOR() { var delegate = %GetConstructorDelegate(this); return %Apply(delegate, this, arguments, 0, %_ArgumentsLength()); } +function CALL_FUNCTION_PROXY() { + var arity = %_ArgumentsLength() - 1; + var proxy = %_Arguments(arity); // The proxy comes in as an additional arg. + var trap = %GetCallTrap(proxy); + return %Apply(trap, this, arguments, 0, arity); +} + + function CALL_FUNCTION_PROXY_AS_CONSTRUCTOR () { var proxy = this; var trap = %GetConstructTrap(proxy); @@ -439,19 +453,13 @@ function CALL_FUNCTION_PROXY_AS_CONSTRUCTOR () { function APPLY_PREPARE(args) { var length; - - // First check that the receiver is callable. - if (!IS_CALLABLE(this)) { - throw %make_type_error(kApplyNonFunction, %to_string_fun(this), - typeof this); - } - // First check whether length is a positive Smi and args is an // array. This is the fast case. If this fails, we do the slow case // that takes care of more eventualities. 
if (IS_ARRAY(args)) { length = args.length; - if (%_IsSmi(length) && length >= 0 && length < kSafeArgumentsLength) { + if (%_IsSmi(length) && length >= 0 && length < kSafeArgumentsLength && + IS_CALLABLE(this)) { return length; } } @@ -463,6 +471,11 @@ function APPLY_PREPARE(args) { // multiplying with pointer size. if (length > kSafeArgumentsLength) throw %make_range_error(kStackOverflow); + if (!IS_CALLABLE(this)) { + throw %make_type_error(kApplyNonFunction, %to_string_fun(this), + typeof this); + } + // Make sure the arguments list has the right type. if (args != null && !IS_SPEC_OBJECT(args)) { throw %make_type_error(kWrongArgs, "Function.prototype.apply"); @@ -476,23 +489,21 @@ function APPLY_PREPARE(args) { function REFLECT_APPLY_PREPARE(args) { var length; - - // First check that the receiver is callable. - if (!IS_CALLABLE(this)) { - throw %make_type_error(kApplyNonFunction, %to_string_fun(this), - typeof this); - } - // First check whether length is a positive Smi and args is an // array. This is the fast case. If this fails, we do the slow case // that takes care of more eventualities. 
if (IS_ARRAY(args)) { length = args.length; - if (%_IsSmi(length) && length >= 0 && length < kSafeArgumentsLength) { + if (%_IsSmi(length) && length >= 0 && length < kSafeArgumentsLength && + IS_CALLABLE(this)) { return length; } } + if (!IS_CALLABLE(this)) { + throw %make_type_error(kCalledNonCallable, %to_string_fun(this)); + } + if (!IS_SPEC_OBJECT(args)) { throw %make_type_error(kWrongArgs, "Reflect.apply"); } @@ -791,7 +802,9 @@ $toString = ToString; "bit_xor_builtin", BIT_XOR, "bit_xor_strong_builtin", BIT_XOR_STRONG, "call_function_proxy_as_constructor_builtin", CALL_FUNCTION_PROXY_AS_CONSTRUCTOR, + "call_function_proxy_builtin", CALL_FUNCTION_PROXY, "call_non_function_as_constructor_builtin", CALL_NON_FUNCTION_AS_CONSTRUCTOR, + "call_non_function_builtin", CALL_NON_FUNCTION, "compare_builtin", COMPARE, "compare_strong_builtin", COMPARE_STRONG, "concat_iterable_to_array_builtin", CONCAT_ITERABLE_TO_ARRAY, diff --git a/src/runtime/runtime-function.cc b/src/runtime/runtime-function.cc index 51befeedb..a00ca4b73 100644 --- a/src/runtime/runtime-function.cc +++ b/src/runtime/runtime-function.cc @@ -16,9 +16,6 @@ namespace v8 { namespace internal { -// TODO(bmeurer): This is an awful hack resulting from our inability to decide -// who's responsible for doing the receiver patching. By any means, we really -// need to kill this runtime function and just do things right instead!! RUNTIME_FUNCTION(Runtime_IsSloppyModeFunction) { SealHandleScope shs(isolate); DCHECK(args.length() == 1); diff --git a/src/x64/builtins-x64.cc b/src/x64/builtins-x64.cc index 9012cab3b..eaae88aa3 100644 --- a/src/x64/builtins-x64.cc +++ b/src/x64/builtins-x64.cc @@ -1003,7 +1003,6 @@ void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) { } -// static void Builtins::Generate_FunctionCall(MacroAssembler* masm) { // Stack Layout: // rsp[0] : Return address @@ -1011,46 +1010,162 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) { // rsp[16] : Argument n-1 // ... 
// rsp[8 * n] : Argument 1 - // rsp[8 * (n + 1)] : Receiver (callable to call) + // rsp[8 * (n + 1)] : Receiver (function to call) // // rax contains the number of arguments, n, not counting the receiver. // // 1. Make sure we have at least one argument. - { - Label done; + { Label done; __ testp(rax, rax); - __ j(not_zero, &done, Label::kNear); + __ j(not_zero, &done); __ PopReturnAddressTo(rbx); - __ PushRoot(Heap::kUndefinedValueRootIndex); + __ Push(masm->isolate()->factory()->undefined_value()); __ PushReturnAddressFrom(rbx); __ incp(rax); __ bind(&done); } - // 2. Get the callable to call (passed as receiver) from the stack. - { - StackArgumentsAccessor args(rsp, rax); + // 2. Get the function to call (passed as receiver) from the stack, check + // if it is a function. + Label slow, non_function; + StackArgumentsAccessor args(rsp, rax); + __ movp(rdi, args.GetReceiverOperand()); + __ JumpIfSmi(rdi, &non_function); + __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); + __ j(not_equal, &slow); + + // 3a. Patch the first argument if necessary when calling a function. + Label shift_arguments; + __ Set(rdx, 0); // indicate regular JS_FUNCTION + { Label convert_to_object, use_global_proxy, patch_receiver; + // Change context eagerly in case we need the global receiver. + __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); + + // Do not transform the receiver for strict mode functions. + __ movp(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); + __ testb(FieldOperand(rbx, SharedFunctionInfo::kStrictModeByteOffset), + Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte)); + __ j(not_equal, &shift_arguments); + + // Do not transform the receiver for natives. + // SharedFunctionInfo is already loaded into rbx. + __ testb(FieldOperand(rbx, SharedFunctionInfo::kNativeByteOffset), + Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte)); + __ j(not_zero, &shift_arguments); + + // Compute the receiver in sloppy mode. 
+ __ movp(rbx, args.GetArgumentOperand(1)); + __ JumpIfSmi(rbx, &convert_to_object, Label::kNear); + + __ CompareRoot(rbx, Heap::kNullValueRootIndex); + __ j(equal, &use_global_proxy); + __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex); + __ j(equal, &use_global_proxy); + + STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); + __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, rcx); + __ j(above_equal, &shift_arguments); + + __ bind(&convert_to_object); + { + // Enter an internal frame in order to preserve argument count. + FrameScope scope(masm, StackFrame::INTERNAL); + __ Integer32ToSmi(rax, rax); + __ Push(rax); + + __ movp(rax, rbx); + ToObjectStub stub(masm->isolate()); + __ CallStub(&stub); + __ movp(rbx, rax); + __ Set(rdx, 0); // indicate regular JS_FUNCTION + + __ Pop(rax); + __ SmiToInteger32(rax, rax); + } + + // Restore the function to rdi. __ movp(rdi, args.GetReceiverOperand()); + __ jmp(&patch_receiver, Label::kNear); + + __ bind(&use_global_proxy); + __ movp(rbx, + Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); + __ movp(rbx, FieldOperand(rbx, GlobalObject::kGlobalProxyOffset)); + + __ bind(&patch_receiver); + __ movp(args.GetArgumentOperand(1), rbx); + + __ jmp(&shift_arguments); } - // 3. Shift arguments and return address one slot down on the stack + // 3b. Check for function proxy. + __ bind(&slow); + __ Set(rdx, 1); // indicate function proxy + __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE); + __ j(equal, &shift_arguments); + __ bind(&non_function); + __ Set(rdx, 2); // indicate non-function + + // 3c. Patch the first argument when calling a non-function. The + // CALL_NON_FUNCTION builtin expects the non-function callee as + // receiver, so overwrite the first argument which will ultimately + // become the receiver. + __ movp(args.GetArgumentOperand(1), rdi); + + // 4. Shift arguments and return address one slot down on the stack // (overwriting the original receiver). 
Adjust argument count to make // the original first argument the new receiver. - { - Label loop; + __ bind(&shift_arguments); + { Label loop; __ movp(rcx, rax); StackArgumentsAccessor args(rsp, rcx); __ bind(&loop); __ movp(rbx, args.GetArgumentOperand(1)); __ movp(args.GetArgumentOperand(0), rbx); __ decp(rcx); - __ j(not_zero, &loop); // While non-zero. + __ j(not_zero, &loop); // While non-zero. __ DropUnderReturnAddress(1, rbx); // Drop one slot under return address. __ decp(rax); // One fewer argument (first argument is new receiver). } - // 4. Call the callable. - __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); + // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin, + // or a function proxy via CALL_FUNCTION_PROXY. + { Label function, non_proxy; + __ testp(rdx, rdx); + __ j(zero, &function); + __ Set(rbx, 0); + __ cmpp(rdx, Immediate(1)); + __ j(not_equal, &non_proxy); + + __ PopReturnAddressTo(rdx); + __ Push(rdi); // re-add proxy object as additional argument + __ PushReturnAddressFrom(rdx); + __ incp(rax); + __ GetBuiltinEntry(rdx, Context::CALL_FUNCTION_PROXY_BUILTIN_INDEX); + __ jmp(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), + RelocInfo::CODE_TARGET); + + __ bind(&non_proxy); + __ GetBuiltinEntry(rdx, Context::CALL_NON_FUNCTION_BUILTIN_INDEX); + __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), + RelocInfo::CODE_TARGET); + __ bind(&function); + } + + // 5b. Get the code to call from the function and check that the number of + // expected arguments matches what we're providing. If so, jump + // (tail-call) to the code in register edx without checking arguments. 
+ __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); + __ LoadSharedFunctionInfoSpecialField(rbx, rdx, + SharedFunctionInfo::kFormalParameterCountOffset); + __ movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset)); + __ cmpp(rax, rbx); + __ j(not_equal, + masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), + RelocInfo::CODE_TARGET); + + ParameterCount expected(0); + __ InvokeCode(rdx, expected, expected, JUMP_FUNCTION, NullCallWrapper()); } @@ -1136,22 +1251,90 @@ static void Generate_ApplyHelper(MacroAssembler* masm, bool targetIsArgument) { Generate_CheckStackOverflow(masm, kFunctionOffset, kRaxIsSmiTagged); - // Push current index and limit, and receiver. + // Push current index and limit. const int kLimitOffset = StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize; const int kIndexOffset = kLimitOffset - 1 * kPointerSize; - __ Push(rax); // limit - __ Push(Immediate(0)); // index - __ Push(Operand(rbp, kReceiverOffset)); // receiver + __ Push(rax); // limit + __ Push(Immediate(0)); // index + + // Get the receiver. + __ movp(rbx, Operand(rbp, kReceiverOffset)); + + // Check that the function is a JS function (otherwise it must be a proxy). + Label push_receiver; + __ movp(rdi, Operand(rbp, kFunctionOffset)); + __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); + __ j(not_equal, &push_receiver); + + // Change context eagerly to get the right global object if necessary. + __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); + + // Do not transform the receiver for strict mode functions. + Label call_to_object, use_global_proxy; + __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); + __ testb(FieldOperand(rdx, SharedFunctionInfo::kStrictModeByteOffset), + Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte)); + __ j(not_equal, &push_receiver); + + // Do not transform the receiver for natives. 
+ __ testb(FieldOperand(rdx, SharedFunctionInfo::kNativeByteOffset), + Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte)); + __ j(not_equal, &push_receiver); + + // Compute the receiver in sloppy mode. + __ JumpIfSmi(rbx, &call_to_object, Label::kNear); + __ CompareRoot(rbx, Heap::kNullValueRootIndex); + __ j(equal, &use_global_proxy); + __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex); + __ j(equal, &use_global_proxy); + + // If given receiver is already a JavaScript object then there's no + // reason for converting it. + STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE); + __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, rcx); + __ j(above_equal, &push_receiver); + + // Convert the receiver to an object. + __ bind(&call_to_object); + __ movp(rax, rbx); + ToObjectStub stub(masm->isolate()); + __ CallStub(&stub); + __ movp(rbx, rax); + __ jmp(&push_receiver, Label::kNear); + + __ bind(&use_global_proxy); + __ movp(rbx, + Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); + __ movp(rbx, FieldOperand(rbx, GlobalObject::kGlobalProxyOffset)); + + // Push the receiver. + __ bind(&push_receiver); + __ Push(rbx); // Loop over the arguments array, pushing each value to the stack - Generate_PushAppliedArguments(masm, kArgumentsOffset, kIndexOffset, - kLimitOffset); + Generate_PushAppliedArguments( + masm, kArgumentsOffset, kIndexOffset, kLimitOffset); - // Call the callable. - // TODO(bmeurer): This should be a tail call according to ES6. + // Call the function. + Label call_proxy; + ParameterCount actual(rax); __ movp(rdi, Operand(rbp, kFunctionOffset)); - __ Call(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); + __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); + __ j(not_equal, &call_proxy); + __ InvokeFunction(rdi, actual, CALL_FUNCTION, NullCallWrapper()); + + frame_scope.GenerateLeaveFrame(); + __ ret(kStackSize * kPointerSize); // remove this, receiver, and arguments + + // Call the function proxy. 
+ __ bind(&call_proxy); + __ Push(rdi); // add function proxy as last argument + __ incp(rax); + __ Set(rbx, 0); + __ GetBuiltinEntry(rdx, Context::CALL_FUNCTION_PROXY_BUILTIN_INDEX); + __ call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), + RelocInfo::CODE_TARGET); // Leave internal frame. } @@ -1625,147 +1808,6 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { } -// static -void Builtins::Generate_CallFunction(MacroAssembler* masm) { - // ----------- S t a t e ------------- - // -- rax : the number of arguments (not including the receiver) - // -- rdi : the function to call (checked to be a JSFunction) - // ----------------------------------- - - Label convert, convert_global_proxy, convert_to_object, done_convert; - StackArgumentsAccessor args(rsp, rax); - __ AssertFunction(rdi); - // TODO(bmeurer): Throw a TypeError if function's [[FunctionKind]] internal - // slot is "classConstructor". - // Enter the context of the function; ToObject has to run in the function - // context, and we also need to take the global proxy from the function - // context in case of conversion. - // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList) - STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset == - SharedFunctionInfo::kStrictModeByteOffset); - __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); - __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); - // We need to convert the receiver for non-native sloppy mode functions. - __ testb(FieldOperand(rdx, SharedFunctionInfo::kNativeByteOffset), - Immediate((1 << SharedFunctionInfo::kNativeBitWithinByte) | - (1 << SharedFunctionInfo::kStrictModeBitWithinByte))); - __ j(not_zero, &done_convert); - { - __ movp(rcx, args.GetReceiverOperand()); - - // ----------- S t a t e ------------- - // -- rax : the number of arguments (not including the receiver) - // -- rcx : the receiver - // -- rdx : the shared function info. 
- // -- rdi : the function to call (checked to be a JSFunction) - // -- rsi : the function context. - // ----------------------------------- - - Label convert_receiver; - __ JumpIfSmi(rcx, &convert_to_object, Label::kNear); - STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE); - __ CmpObjectType(rcx, FIRST_JS_RECEIVER_TYPE, rbx); - __ j(above_equal, &done_convert); - __ JumpIfRoot(rcx, Heap::kUndefinedValueRootIndex, &convert_global_proxy, - Label::kNear); - __ JumpIfNotRoot(rcx, Heap::kNullValueRootIndex, &convert_to_object, - Label::kNear); - __ bind(&convert_global_proxy); - { - // Patch receiver to global proxy. - __ LoadGlobalProxy(rcx); - } - __ jmp(&convert_receiver); - __ bind(&convert_to_object); - { - // Convert receiver using ToObject. - // TODO(bmeurer): Inline the allocation here to avoid building the frame - // in the fast case? (fall back to AllocateInNewSpace?) - FrameScope scope(masm, StackFrame::INTERNAL); - __ Integer32ToSmi(rax, rax); - __ Push(rax); - __ Push(rdi); - __ movp(rax, rcx); - ToObjectStub stub(masm->isolate()); - __ CallStub(&stub); - __ movp(rcx, rax); - __ Pop(rdi); - __ Pop(rax); - __ SmiToInteger32(rax, rax); - } - __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); - __ bind(&convert_receiver); - __ movp(args.GetReceiverOperand(), rcx); - } - __ bind(&done_convert); - - // ----------- S t a t e ------------- - // -- rax : the number of arguments (not including the receiver) - // -- rdx : the shared function info. - // -- rdi : the function to call (checked to be a JSFunction) - // -- rsi : the function context. 
- // ----------------------------------- - - __ LoadSharedFunctionInfoSpecialField( - rbx, rdx, SharedFunctionInfo::kFormalParameterCountOffset); - __ movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset)); - ParameterCount actual(rax); - ParameterCount expected(rbx); - __ InvokeCode(rdx, expected, actual, JUMP_FUNCTION, NullCallWrapper()); -} - - -// static -void Builtins::Generate_Call(MacroAssembler* masm) { - // ----------- S t a t e ------------- - // -- rax : the number of arguments (not including the receiver) - // -- rdi : the target to call (can be any Object). - // ----------------------------------- - - Label non_smi, non_function; - __ JumpIfSmi(rdi, &non_function); - __ bind(&non_smi); - __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rdx); - __ j(equal, masm->isolate()->builtins()->CallFunction(), - RelocInfo::CODE_TARGET); - __ CmpInstanceType(rdx, JS_FUNCTION_PROXY_TYPE); - __ j(not_equal, &non_function); - - // 1. Call to function proxy. - // TODO(neis): This doesn't match the ES6 spec for [[Call]] on proxies. - __ movp(rdi, FieldOperand(rdi, JSFunctionProxy::kCallTrapOffset)); - __ AssertNotSmi(rdi); - __ jmp(&non_smi); - - // 2. Call to something else, which might have a [[Call]] internal method (if - // not we raise an exception). - __ bind(&non_function); - // TODO(bmeurer): I wonder why we prefer to have slow API calls? This could - // be awesome instead; i.e. a trivial improvement would be to call into the - // runtime and just deal with the API function there instead of returning a - // delegate from a runtime call that just jumps back to the runtime once - // called. Or, bonus points, call directly into the C API function here, as - // we do in some Crankshaft fast cases. - StackArgumentsAccessor args(rsp, rax); - // Overwrite the original receiver with the (original) target. - __ movp(args.GetReceiverOperand(), rdi); - { - // Determine the delegate for the target (if any). 
- FrameScope scope(masm, StackFrame::INTERNAL); - __ Integer32ToSmi(rax, rax); - __ Push(rax); - __ Push(rdi); - __ CallRuntime(Runtime::kGetFunctionDelegate, 1); - __ movp(rdi, rax); - __ Pop(rax); - __ SmiToInteger32(rax, rax); - } - // The delegate is always a regular function. - __ AssertFunction(rdi); - __ Jump(masm->isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET); -} - - void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) { // Lookup the function in the JavaScript frame. __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc index a814bfb59..55891d51e 100644 --- a/src/x64/code-stubs-x64.cc +++ b/src/x64/code-stubs-x64.cc @@ -1902,10 +1902,36 @@ static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) { } -static void EmitSlowCase(MacroAssembler* masm, StackArgumentsAccessor* args, - int argc) { +static void EmitSlowCase(Isolate* isolate, + MacroAssembler* masm, + StackArgumentsAccessor* args, + int argc, + Label* non_function) { + // Check for function proxy. + __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE); + __ j(not_equal, non_function); + __ PopReturnAddressTo(rcx); + __ Push(rdi); // put proxy as additional argument under return address + __ PushReturnAddressFrom(rcx); + __ Set(rax, argc + 1); + __ Set(rbx, 0); + __ GetBuiltinEntry(rdx, Context::CALL_FUNCTION_PROXY_BUILTIN_INDEX); + { + Handle adaptor = + masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); + __ jmp(adaptor, RelocInfo::CODE_TARGET); + } + + // CALL_NON_FUNCTION expects the non-function callee as receiver (instead + // of the original receiver from the call site). 
+ __ bind(non_function); + __ movp(args->GetReceiverOperand(), rdi); __ Set(rax, argc); - __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); + __ Set(rbx, 0); + __ GetBuiltinEntry(rdx, Context::CALL_NON_FUNCTION_BUILTIN_INDEX); + Handle adaptor = + isolate->builtins()->ArgumentsAdaptorTrampoline(); + __ Jump(adaptor, RelocInfo::CODE_TARGET); } @@ -1930,12 +1956,13 @@ static void CallFunctionNoFeedback(MacroAssembler* masm, // rdi : the function to call // wrap_and_call can only be true if we are compiling a monomorphic method. - Label slow, wrap, cont; + Isolate* isolate = masm->isolate(); + Label slow, non_function, wrap, cont; StackArgumentsAccessor args(rsp, argc); if (needs_checks) { // Check that the function really is a JavaScript function. - __ JumpIfSmi(rdi, &slow); + __ JumpIfSmi(rdi, &non_function); // Goto slow case if we do not have a function. __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); @@ -1970,7 +1997,7 @@ static void CallFunctionNoFeedback(MacroAssembler* masm, if (needs_checks) { // Slow-case: Non-function called. __ bind(&slow); - EmitSlowCase(masm, &args, argc); + EmitSlowCase(isolate, masm, &args, argc, &non_function); } if (call_as_method) { @@ -2086,28 +2113,32 @@ void CallIC_ArrayStub::Generate(MacroAssembler* masm) { __ movp(rcx, FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize)); // Verify that ecx contains an AllocationSite - __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset), - Heap::kAllocationSiteMapRootIndex); - __ j(not_equal, &miss, Label::kNear); + Factory* factory = masm->isolate()->factory(); + __ Cmp(FieldOperand(rcx, HeapObject::kMapOffset), + factory->allocation_site_map()); + __ j(not_equal, &miss); // Increment the call count for monomorphic function calls. 
- { - __ SmiAddConstant(FieldOperand(rbx, rdx, times_pointer_size, - FixedArray::kHeaderSize + kPointerSize), - Smi::FromInt(CallICNexus::kCallCountIncrement)); + __ SmiAddConstant(FieldOperand(rbx, rdx, times_pointer_size, + FixedArray::kHeaderSize + kPointerSize), + Smi::FromInt(CallICNexus::kCallCountIncrement)); - __ movp(rbx, rcx); - __ movp(rdx, rdi); - ArrayConstructorStub stub(masm->isolate(), arg_count()); - __ TailCallStub(&stub); - } + __ movp(rbx, rcx); + __ movp(rdx, rdi); + ArrayConstructorStub stub(masm->isolate(), arg_count()); + __ TailCallStub(&stub); __ bind(&miss); GenerateMiss(masm); // The slow case, we need this no matter what to complete a call after a miss. - __ Set(rax, arg_count()); - __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); + CallFunctionNoFeedback(masm, + arg_count(), + true, + CallAsMethod()); + + // Unreachable. + __ int3(); } @@ -2121,7 +2152,7 @@ void CallICStub::Generate(MacroAssembler* masm) { const int generic_offset = FixedArray::OffsetOfElementAt(TypeFeedbackVector::kGenericCountIndex); Label extra_checks_or_miss, slow_start; - Label slow, wrap, cont; + Label slow, non_function, wrap, cont; Label have_js_function; int argc = arg_count(); StackArgumentsAccessor args(rsp, argc); @@ -2176,7 +2207,7 @@ void CallICStub::Generate(MacroAssembler* masm) { __ InvokeFunction(rdi, actual, JUMP_FUNCTION, NullCallWrapper()); __ bind(&slow); - EmitSlowCase(masm, &args, argc); + EmitSlowCase(isolate, masm, &args, argc, &non_function); if (CallAsMethod()) { __ bind(&wrap); @@ -2257,7 +2288,7 @@ void CallICStub::Generate(MacroAssembler* masm) { // the slow case __ bind(&slow_start); // Check that function is not a smi. - __ JumpIfSmi(rdi, &slow); + __ JumpIfSmi(rdi, &non_function); // Check that function is a JSFunction. 
__ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); __ j(not_equal, &slow); diff --git a/src/x64/macro-assembler-x64.cc b/src/x64/macro-assembler-x64.cc index 59482bea1..c67d955a8 100644 --- a/src/x64/macro-assembler-x64.cc +++ b/src/x64/macro-assembler-x64.cc @@ -3394,18 +3394,6 @@ void MacroAssembler::AssertName(Register object) { } -void MacroAssembler::AssertFunction(Register object) { - if (emit_debug_code()) { - testb(object, Immediate(kSmiTagMask)); - Check(not_equal, kOperandIsASmiAndNotAFunction); - Push(object); - CmpObjectType(object, JS_FUNCTION_TYPE, object); - Pop(object); - Check(not_equal, kOperandIsNotAFunction); - } -} - - void MacroAssembler::AssertUndefinedOrAllocationSite(Register object) { if (emit_debug_code()) { Label done_checking; @@ -4555,12 +4543,6 @@ void MacroAssembler::LoadContext(Register dst, int context_chain_length) { } -void MacroAssembler::LoadGlobalProxy(Register dst) { - movp(dst, GlobalObjectOperand()); - movp(dst, FieldOperand(dst, GlobalObject::kGlobalProxyOffset)); -} - - void MacroAssembler::LoadTransitionedArrayMapConditional( ElementsKind expected_kind, ElementsKind transitioned_kind, diff --git a/src/x64/macro-assembler-x64.h b/src/x64/macro-assembler-x64.h index cce88d8cc..63feb8284 100644 --- a/src/x64/macro-assembler-x64.h +++ b/src/x64/macro-assembler-x64.h @@ -148,21 +148,6 @@ class MacroAssembler: public Assembler { void CompareRoot(const Operand& with, Heap::RootListIndex index); void PushRoot(Heap::RootListIndex index); - // Compare the object in a register to a value and jump if they are equal. - void JumpIfRoot(Register with, Heap::RootListIndex index, Label* if_equal, - Label::Distance if_equal_distance = Label::kNear) { - CompareRoot(with, index); - j(equal, if_equal, if_equal_distance); - } - - // Compare the object in a register to a value and jump if they are not equal. 
- void JumpIfNotRoot(Register with, Heap::RootListIndex index, - Label* if_not_equal, - Label::Distance if_not_equal_distance = Label::kNear) { - CompareRoot(with, index); - j(not_equal, if_not_equal, if_not_equal_distance); - } - // These functions do not arrange the registers in any particular order so // they are not useful for calls that can cause a GC. The caller can // exclude up to 3 registers that do not need to be saved and restored. @@ -1116,9 +1101,6 @@ class MacroAssembler: public Assembler { // Abort execution if argument is not a name, enabled via --debug-code. void AssertName(Register object); - // Abort execution if argument is not a JSFunction, enabled via --debug-code. - void AssertFunction(Register object); - // Abort execution if argument is not undefined or an AllocationSite, enabled // via --debug-code. void AssertUndefinedOrAllocationSite(Register object); @@ -1271,9 +1253,6 @@ class MacroAssembler: public Assembler { // Find the function context up the context chain. void LoadContext(Register dst, int context_chain_length); - // Load the global proxy from the current context. - void LoadGlobalProxy(Register dst); - // Conditionally load the cached Array transitioned map of type // transitioned_kind from the native context if the map in register // map_in_out is the cached Array map in the native context of