// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_X64

#include "src/code-factory.h"
#include "src/codegen.h"
#include "src/deoptimizer.h"
#include "src/full-codegen/full-codegen.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)
void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- rax                 : number of arguments excluding receiver
  //  -- rdi                 : called function (only guaranteed when
  //                           extra_args requires it)
  //  -- rsp[0]              : return address
  //  -- rsp[8]              : last argument
  //  -- ...
  //  -- rsp[8 * argc]       : first argument (argc == rax)
  //  -- rsp[8 * (argc + 1)] : receiver
  // -----------------------------------

  // Insert extra arguments.
  int num_extra_args = 0;
  if (extra_args == NEEDS_CALLED_FUNCTION) {
    num_extra_args = 1;
    __ PopReturnAddressTo(kScratchRegister);
    __ Push(rdi);
    __ PushReturnAddressFrom(kScratchRegister);
  } else {
    DCHECK(extra_args == NO_EXTRA_ARGUMENTS);
  }

  // JumpToExternalReference expects rax to contain the number of arguments
  // including the receiver and the extra arguments.
  __ addp(rax, Immediate(num_extra_args + 1));
  __ JumpToExternalReference(ExternalReference(id, masm->isolate()), 1);
}
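
// Editorial sketch (not part of the original source): assuming a call with
// two JS arguments and extra_args == NO_EXTRA_ARGUMENTS, the adjustment above
// works out as
//
//   rax = 2        // arguments excluding receiver
//   rax += 0 + 1   // num_extra_args + receiver
//   // rax == 3, the receiver-inclusive count the C++ builtin expects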
static void CallRuntimePassFunction(
    MacroAssembler* masm, Runtime::FunctionId function_id) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the function onto the stack.
  __ Push(rdi);
  // Function is also the parameter to the runtime call.
  __ Push(rdi);

  __ CallRuntime(function_id, 1);
  __ Pop(rdi);
}
static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ movp(kScratchRegister,
          FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movp(kScratchRegister,
          FieldOperand(kScratchRegister, SharedFunctionInfo::kCodeOffset));
  __ leap(kScratchRegister, FieldOperand(kScratchRegister, Code::kHeaderSize));
  __ jmp(kScratchRegister);
}


static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
  __ leap(rax, FieldOperand(rax, Code::kHeaderSize));
  __ jmp(rax);
}
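
// Editorial note (not in the original source): Code objects are heap objects,
// so a raw Code pointer carries kHeapObjectTag. FieldOperand(reg, N) expands
// to Operand(reg, N - kHeapObjectTag), so the leap above both strips the tag
// and skips the Code header in a single instruction:
//
//   entry = code_ptr - kHeapObjectTag + Code::kHeaderSize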
void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive. A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
  __ j(above_equal, &ok);

  CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
  GenerateTailCallToReturnedCode(masm);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_memento) {
  // ----------- S t a t e -------------
  //  -- rax: number of arguments
  //  -- rdi: constructor function
  //  -- rbx: allocation site or undefined
  //  -- rdx: original constructor
  // -----------------------------------

  // Should never create mementos for api functions.
  DCHECK(!is_api_function || !create_memento);

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ AssertUndefinedOrAllocationSite(rbx);
    __ Push(rbx);
    __ Integer32ToSmi(rax, rax);
    __ Push(rax);
    __ Push(rdi);
    __ Push(rdx);
    // Try to allocate the object without transitioning into C code. If any of
    // the preconditions is not met, the code bails out to the runtime call.
    Label rt_call, allocated;
    if (FLAG_inline_new) {
      ExternalReference debug_step_in_fp =
          ExternalReference::debug_step_in_fp_address(masm->isolate());
      __ Move(kScratchRegister, debug_step_in_fp);
      __ cmpp(Operand(kScratchRegister, 0), Immediate(0));
      __ j(not_equal, &rt_call);

      // Fall back to runtime if the original constructor and function differ.
      __ cmpp(rdx, rdi);
      __ j(not_equal, &rt_call);

      // Verified that the constructor is a JSFunction.
      // Load the initial map and verify that it is in fact a map.
      // rdi: constructor
      __ movp(rax, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
      // Will both indicate a NULL and a Smi
      DCHECK(kSmiTag == 0);
      __ JumpIfSmi(rax, &rt_call);
      // rdi: constructor
      // rax: initial map (if proven valid below)
      __ CmpObjectType(rax, MAP_TYPE, rbx);
      __ j(not_equal, &rt_call);

      // Check that the constructor is not constructing a JSFunction (see
      // comments in Runtime_NewObject in runtime.cc); in that case the
      // initial map's instance type would be JS_FUNCTION_TYPE.
      // rdi: constructor
      // rax: initial map
      __ CmpInstanceType(rax, JS_FUNCTION_TYPE);
      __ j(equal, &rt_call);
      if (!is_api_function) {
        Label allocate;
        // The code below relies on these assumptions.
        STATIC_ASSERT(Map::Counter::kShift + Map::Counter::kSize == 32);
        // Check if slack tracking is enabled.
        __ movl(rsi, FieldOperand(rax, Map::kBitField3Offset));
        __ shrl(rsi, Immediate(Map::Counter::kShift));
        __ cmpl(rsi, Immediate(Map::kSlackTrackingCounterEnd));
        __ j(less, &allocate);
        // Decrease generous allocation count.
        __ subl(FieldOperand(rax, Map::kBitField3Offset),
                Immediate(1 << Map::Counter::kShift));

        __ cmpl(rsi, Immediate(Map::kSlackTrackingCounterEnd));
        __ j(not_equal, &allocate);

        __ Push(rax);
        __ Push(rdx);
        __ Push(rdi);

        __ Push(rdi);  // constructor
        __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);

        __ Pop(rdi);
        __ Pop(rdx);
        __ Pop(rax);
        __ movl(rsi, Immediate(Map::kSlackTrackingCounterEnd - 1));

        __ bind(&allocate);
      }
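
      // Editorial sketch (not in the original source): the slack tracking
      // counter lives in the upper bits of the bit field at
      // Map::kBitField3Offset, so subtracting 1 << Map::Counter::kShift
      // decrements it in place without disturbing the other bit fields. When
      // the pre-decrement value equals kSlackTrackingCounterEnd, tracking has
      // just finished and Runtime::kFinalizeInstanceSize shrinks the instance
      // size to what was actually used.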
      // Now allocate the JSObject on the heap.
      __ movzxbp(rdi, FieldOperand(rax, Map::kInstanceSizeOffset));
      __ shlp(rdi, Immediate(kPointerSizeLog2));
      if (create_memento) {
        __ addp(rdi, Immediate(AllocationMemento::kSize));
      }
      // rdi: size of new object
      __ Allocate(rdi,
                  rbx,
                  rdi,
                  no_reg,
                  &rt_call,
                  NO_ALLOCATION_FLAGS);
      Factory* factory = masm->isolate()->factory();
      // Allocated the JSObject, now initialize the fields.
      // rax: initial map
      // rbx: JSObject (not HeapObject tagged - the actual address).
      // rdi: start of next object (including memento if create_memento)
      __ movp(Operand(rbx, JSObject::kMapOffset), rax);
      __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
      __ movp(Operand(rbx, JSObject::kPropertiesOffset), rcx);
      __ movp(Operand(rbx, JSObject::kElementsOffset), rcx);
      // Set extra fields in the newly allocated object.
      // rax: initial map
      // rbx: JSObject
      // rdi: start of next object (including memento if create_memento)
      // rsi: slack tracking counter (non-API function case)
      __ leap(rcx, Operand(rbx, JSObject::kHeaderSize));
      __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
      if (!is_api_function) {
        Label no_inobject_slack_tracking;

        // Check if slack tracking is enabled.
        __ cmpl(rsi, Immediate(Map::kSlackTrackingCounterEnd));
        __ j(less, &no_inobject_slack_tracking);

        // Allocate object with a slack.
        __ movzxbp(
            rsi,
            FieldOperand(
                rax, Map::kInObjectPropertiesOrConstructorFunctionIndexOffset));
        __ movzxbp(rax, FieldOperand(rax, Map::kUnusedPropertyFieldsOffset));
        __ subp(rsi, rax);
        __ leap(rsi,
                Operand(rbx, rsi, times_pointer_size, JSObject::kHeaderSize));
        // rsi: offset of first field after pre-allocated fields
        if (FLAG_debug_code) {
          __ cmpp(rsi, rdi);
          __ Assert(less_equal,
                    kUnexpectedNumberOfPreAllocatedPropertyFields);
        }
        __ InitializeFieldsWithFiller(rcx, rsi, rdx);
        __ LoadRoot(rdx, Heap::kOnePointerFillerMapRootIndex);
        // Fill the remaining fields with one pointer filler map.

        __ bind(&no_inobject_slack_tracking);
      }
      if (create_memento) {
        __ leap(rsi, Operand(rdi, -AllocationMemento::kSize));
        __ InitializeFieldsWithFiller(rcx, rsi, rdx);

        // Fill in memento fields if necessary.
        // rsi: points to the allocated but uninitialized memento.
        __ Move(Operand(rsi, AllocationMemento::kMapOffset),
                factory->allocation_memento_map());
        // Get the cell or undefined.
        __ movp(rdx, Operand(rsp, 3 * kPointerSize));
        __ AssertUndefinedOrAllocationSite(rdx);
        __ movp(Operand(rsi, AllocationMemento::kAllocationSiteOffset), rdx);
      } else {
        __ InitializeFieldsWithFiller(rcx, rdi, rdx);
      }
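
      // Editorial note (not in the original source): with a memento, object
      // and AllocationMemento come out of one allocation:
      //
      //   [ JSObject fields ............ ][ memento map | alloc site ]
      //   ^ rbx (untagged)                ^ rdi - AllocationMemento::kSize
      //                                                               ^ rdi
      //
      // so the filler loop stops AllocationMemento::kSize bytes early and the
      // memento's two fields are then written explicitly.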
      // Add the object tag to make the JSObject real, so that we can continue
      // and jump into the continuation code at any time from now on.
      // rbx: JSObject (untagged)
      __ orp(rbx, Immediate(kHeapObjectTag));

      // Continue with JSObject being successfully allocated
      // rbx: JSObject (tagged)
      __ jmp(&allocated);
    }

    // Allocate the new receiver object using the runtime call.
    // rdx: original constructor
    __ bind(&rt_call);
    int offset = kPointerSize;
    if (create_memento) {
      // Get the cell or allocation site.
      __ movp(rdi, Operand(rsp, kPointerSize * 3));
      __ Push(rdi);  // argument 1: allocation site
      offset += kPointerSize;
    }

    // Must restore rsi (context) and rdi (constructor) before calling runtime.
    __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
    __ movp(rdi, Operand(rsp, offset));
    __ Push(rdi);  // argument 2/1: constructor function
    __ Push(rdx);  // argument 3/2: original constructor
    if (create_memento) {
      __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 3);
    } else {
      __ CallRuntime(Runtime::kNewObject, 2);
    }
    __ movp(rbx, rax);  // store result in rbx

    // Runtime_NewObjectWithAllocationSite increments allocation count.
    // Skip the increment.
    Label count_incremented;
    if (create_memento) {
      __ jmp(&count_incremented);
    }

    // New object allocated.
    // rbx: newly allocated object
    __ bind(&allocated);

    if (create_memento) {
      __ movp(rcx, Operand(rsp, 3 * kPointerSize));
      __ Cmp(rcx, masm->isolate()->factory()->undefined_value());
      __ j(equal, &count_incremented);
      // rcx is an AllocationSite. We are creating a memento from it, so we
      // need to increment the memento create count.
      __ SmiAddConstant(
          FieldOperand(rcx, AllocationSite::kPretenureCreateCountOffset),
          Smi::FromInt(1));

      __ bind(&count_incremented);
    }
    // Restore the parameters.
    __ Pop(rdx);
    __ Pop(rdi);

    // Retrieve smi-tagged arguments count from the stack.
    __ movp(rax, Operand(rsp, 0));
    __ SmiToInteger32(rax, rax);

    // Push new.target onto the construct frame. This is stored just below the
    // receiver on the stack.
    __ Push(rdx);

    // Push the allocated receiver to the stack. We need two copies
    // because we may have to return the original one and the calling
    // conventions dictate that the called function pops the receiver.
    __ Push(rbx);
    __ Push(rbx);

    // Set up pointer to last argument.
    __ leap(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    Label loop, entry;
    __ movp(rcx, rax);
    __ jmp(&entry);
    __ bind(&loop);
    __ Push(Operand(rbx, rcx, times_pointer_size, 0));
    __ bind(&entry);
    __ decp(rcx);
    __ j(greater_equal, &loop);
    // Call the function.
    if (is_api_function) {
      __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
      Handle<Code> code =
          masm->isolate()->builtins()->HandleApiCallConstruct();
      __ Call(code, RelocInfo::CODE_TARGET);
    } else {
      ParameterCount actual(rax);
      __ InvokeFunction(rdi, actual, CALL_FUNCTION, NullCallWrapper());
    }

    // Store offset of return address for deoptimizer.
    if (!is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }
    // Restore context from the frame.
    __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7
    // on page 74.
    Label use_receiver, exit;
    // If the result is a smi, it is *not* an object in the ECMA sense.
    __ JumpIfSmi(rax, &use_receiver);

    // If the type of the result (stored in its map) is less than
    // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
    __ j(above_equal, &exit);

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ bind(&use_receiver);
    __ movp(rax, Operand(rsp, 0));

    // Restore the arguments count and leave the construct frame. The
    // arguments count is stored below the receiver and the new.target.
    __ bind(&exit);
    __ movp(rbx, Operand(rsp, 2 * kPointerSize));

    // Leave construct frame.
  }
  // Remove caller arguments from the stack and return.
  __ PopReturnAddressTo(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ PushReturnAddressFrom(rcx);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->constructed_objects(), 1);
  __ ret(0);
}
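
// Editorial sketch (not in the original source): rbx still holds the
// smi-tagged argument count here. SmiToIndex turns it into a scaled index, so
// the leap drops argc * kPointerSize bytes of arguments plus one extra slot
// for the receiver; e.g. for argc == 2 it pops 3 * 8 == 24 bytes before the
// saved return address is pushed back.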
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false);
}
void Builtins::Generate_JSConstructStubForDerived(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax: number of arguments
  //  -- rdi: constructor function
  //  -- rbx: allocation site or undefined
  //  -- rdx: original constructor
  // -----------------------------------

  {
    FrameScope frame_scope(masm, StackFrame::CONSTRUCT);

    // Preserve allocation site.
    __ AssertUndefinedOrAllocationSite(rbx);
    __ Push(rbx);

    // Store a smi-tagged arguments count on the stack.
    __ Integer32ToSmi(rax, rax);
    __ Push(rax);
    __ SmiToInteger32(rax, rax);

    // Push new.target.
    __ Push(rdx);

    // receiver is the hole.
    __ Push(masm->isolate()->factory()->the_hole_value());
    // Set up pointer to last argument.
    __ leap(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    Label loop, entry;
    __ movp(rcx, rax);
    __ jmp(&entry);
    __ bind(&loop);
    __ Push(Operand(rbx, rcx, times_pointer_size, 0));
    __ bind(&entry);
    __ decp(rcx);
    __ j(greater_equal, &loop);

    // Handle step in.
    Label skip_step_in;
    ExternalReference debug_step_in_fp =
        ExternalReference::debug_step_in_fp_address(masm->isolate());
    __ Move(kScratchRegister, debug_step_in_fp);
    __ cmpp(Operand(kScratchRegister, 0), Immediate(0));
    __ j(equal, &skip_step_in);

    __ Push(rax);
    __ Push(rdi);
    __ Push(rdi);
    __ CallRuntime(Runtime::kHandleStepInForDerivedConstructors, 1);
    __ Pop(rdi);
    __ Pop(rax);

    __ bind(&skip_step_in);
    // Call the function.
    ParameterCount actual(rax);
    __ InvokeFunction(rdi, actual, CALL_FUNCTION, NullCallWrapper());

    // Restore context from the frame.
    __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));

    // Get arguments count, skipping over new.target.
    __ movp(rbx, Operand(rsp, kPointerSize));  // Get arguments count.
  }                                            // Leave construct frame.

  // Remove caller arguments from the stack and return.
  __ PopReturnAddressTo(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ PushReturnAddressFrom(rcx);
  __ ret(0);
}
enum IsTagged { kRaxIsSmiTagged, kRaxIsUntaggedInt };


// Clobbers rcx, rdx, kScratchRegister; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm,
                                        const int calleeOffset,
                                        IsTagged rax_is_tagged) {
  // rax : the number of items to be pushed to the stack
  //
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
  __ movp(rcx, rsp);
  // Make rcx the space we have left. The stack might already be overflowed
  // here which will cause rcx to become negative.
  __ subp(rcx, kScratchRegister);
  // Make rdx the space we need for the array when it is unrolled onto the
  // stack.
  if (rax_is_tagged == kRaxIsSmiTagged) {
    __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2);
  } else {
    DCHECK(rax_is_tagged == kRaxIsUntaggedInt);
    __ movp(rdx, rax);
    __ shlq(rdx, Immediate(kPointerSizeLog2));
  }
  // Check if the arguments will overflow the stack.
  __ cmpp(rcx, rdx);
  __ j(greater, &okay);  // Signed comparison.

  // Out of stack space.
  __ Push(Operand(rbp, calleeOffset));
  if (rax_is_tagged == kRaxIsUntaggedInt) {
    __ Integer32ToSmi(rax, rax);
  }
  __ Push(rax);
  __ InvokeBuiltin(Context::STACK_OVERFLOW_BUILTIN_INDEX, CALL_FUNCTION);

  __ bind(&okay);
}
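
// Editorial note (not in the original source): the check above is equivalent
// to the C expression
//
//   (rsp - real_stack_limit) > n * kPointerSize
//
// evaluated with signed arithmetic, so it stays correct even if rsp has
// already dipped below the limit and the left-hand side is negative.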
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Expects five C++ function parameters.
  // - Address entry (ignored)
  // - JSFunction* function
  // - Object* receiver
  // - int argc
  // - Object*** argv
  // (see Handle::Invoke in execution.cc).

  // Open a C++ scope for the FrameScope.
  {
    // Platform specific argument handling. After this, the stack contains
    // an internal frame and the pushed function and receiver, and
    // registers rax and rbx hold the argument count and argument array,
    // while rdi holds the function pointer and rsi the context.
#ifdef _WIN64
    // MSVC parameters in:
    // rcx        : entry (ignored)
    // rdx        : function
    // r8         : receiver
    // r9         : argc
    // [rsp+0x20] : argv

    // Clear the context before we push it when entering the internal frame.
    __ Set(rsi, 0);
    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Load the function context into rsi.
    __ movp(rsi, FieldOperand(rdx, JSFunction::kContextOffset));

    // Push the function and the receiver onto the stack.
    __ Push(rdx);
    __ Push(r8);

    // Load the number of arguments and setup pointer to the arguments.
    __ movp(rax, r9);
    // Load the previous frame pointer to access C argument on stack
    __ movp(kScratchRegister, Operand(rbp, 0));
    __ movp(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
    // Load the function pointer into rdi.
    __ movp(rdi, rdx);
#else  // _WIN64
    // GCC parameters in:
    // rdi : entry (ignored)
    // rsi : function
    // rdx : receiver
    // rcx : argc
    // r8  : argv

    __ movp(rdi, rsi);
    // rdi : function

    // Clear the context before we push it when entering the internal frame.
    __ Set(rsi, 0);
    // Enter an internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Push the function and receiver and setup the context.
    __ Push(rdi);
    __ Push(rdx);
    __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

    // Load the number of arguments and setup pointer to the arguments.
    __ movp(rax, rcx);
    __ movp(rbx, r8);
#endif  // _WIN64

    // Current stack contents:
    // [rsp + 2 * kPointerSize ... ] : Internal frame
    // [rsp + kPointerSize]          : function
    // [rsp]                         : receiver
    // Current register contents:
    // rax : argc
    // rbx : argv
    // rsi : context
    // rdi : function
    // Check if we have enough stack space to push all arguments.
    // The function is the first thing that was pushed above after entering
    // the internal frame.
    const int kFunctionOffset =
        InternalFrameConstants::kCodeOffset - kRegisterSize;
    // Expects argument count in rax. Clobbers rcx, rdx.
    Generate_CheckStackOverflow(masm, kFunctionOffset, kRaxIsUntaggedInt);

    // Copy arguments to the stack in a loop.
    // Register rbx points to array of pointers to handle locations.
    // Push the values of these handles.
    Label loop, entry;
    __ Set(rcx, 0);  // Set loop variable to 0.
    __ jmp(&entry);
    __ bind(&loop);
    __ movp(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
    __ Push(Operand(kScratchRegister, 0));  // dereference handle
    __ addp(rcx, Immediate(1));
    __ bind(&entry);
    __ cmpp(rcx, rax);
    __ j(not_equal, &loop);
    // Invoke the code.
    if (is_construct) {
      // No type feedback cell is available
      __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
      // Expects rdi to hold function pointer.
      CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
      __ CallStub(&stub);
    } else {
      ParameterCount actual(rax);
      // Function must be in rdi.
      __ InvokeFunction(rdi, actual, CALL_FUNCTION, NullCallWrapper());
    }
    // Exit the internal frame. Notice that this also removes the empty
    // context and the function left on the stack by the code
    // invocation.
  }

  // TODO(X64): Is argument correct? Is there a receiver to remove?
  __ ret(1 * kPointerSize);  // Remove receiver.
}


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}
// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right. The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
// o rdi: the JS function object being called
// o rsi: our context
// o rbp: the caller's frame pointer
// o rsp: stack pointer (pointing to return address)
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-x64.h for its layout.
// TODO(rmcilroy): We will need to include the current bytecode pointer in the
// frame.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ pushq(rbp);  // Caller's frame pointer.
  __ movp(rbp, rsp);
  __ Push(rsi);  // Callee's context.
  __ Push(rdi);  // Callee's JS function.

  // Get the bytecode array from the function object and load it into
  // kInterpreterBytecodeArrayRegister.
  __ movp(rax, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movp(kInterpreterBytecodeArrayRegister,
          FieldOperand(rax, SharedFunctionInfo::kFunctionDataOffset));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
    __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
                     rax);
    __ Assert(equal, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }
  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size from the BytecodeArray object.
    __ movl(rcx, FieldOperand(kInterpreterBytecodeArrayRegister,
                              BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ movp(rdx, rsp);
    __ subp(rdx, rcx);
    __ CompareRoot(rdx, Heap::kRealStackLimitRootIndex);
    __ j(above_equal, &ok, Label::kNear);
    __ InvokeBuiltin(Context::STACK_OVERFLOW_BUILTIN_INDEX, CALL_FUNCTION);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file
    // entries.
    Label loop_header;
    Label loop_check;
    __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
    __ j(always, &loop_check);
    __ bind(&loop_header);
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    __ Push(rdx);
    // Continue loop if not done.
    __ bind(&loop_check);
    __ subp(rcx, Immediate(kPointerSize));
    __ j(greater_equal, &loop_header, Label::kNear);
  }
  // TODO(rmcilroy): List of things not currently dealt with here but done in
  // fullcodegen's prologue:
  //  - Support profiler (specifically profiling_counter).
  //  - Call ProfileEntryHookStub when isolate has a function_entry_hook.
  //  - Allow simulator stop operations if FLAG_stop_at is set.
  //  - Deal with sloppy mode functions which need to replace the
  //    receiver with the global proxy when called as functions (without an
  //    explicit receiver object).
  //  - Code aging of the BytecodeArray object.
  //  - Supporting FLAG_trace.
  //
  // The following items are also not done here, and will probably be done
  // using explicit bytecodes instead:
  //  - Allocating a new local context if applicable.
  //  - Setting up a local binding to the this function, which is used in
  //    derived constructors with super calls.
  //  - Setting new.target if required.
  //  - Dealing with REST parameters (only if
  //    https://codereview.chromium.org/1235153006 doesn't land by then).
  //  - Dealing with argument objects.

  // Perform stack guard check.
  {
    Label ok;
    __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
    __ j(above_equal, &ok, Label::kNear);
    __ CallRuntime(Runtime::kStackGuard, 0);
    __ bind(&ok);
  }
  // Load accumulator, register file, bytecode offset, dispatch table into
  // registers.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
  __ movp(kInterpreterRegisterFileRegister, rbp);
  __ subp(
      kInterpreterRegisterFileRegister,
      Immediate(kPointerSize + StandardFrameConstants::kFixedFrameSizeFromFp));
  __ movp(kInterpreterBytecodeOffsetRegister,
          Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));
  __ LoadRoot(kInterpreterDispatchTableRegister,
              Heap::kInterpreterTableRootIndex);
  __ addp(kInterpreterDispatchTableRegister,
          Immediate(FixedArray::kHeaderSize - kHeapObjectTag));

  // Dispatch to the first bytecode handler for the function.
  __ movzxbp(rbx, Operand(kInterpreterBytecodeArrayRegister,
                          kInterpreterBytecodeOffsetRegister, times_1, 0));
  __ movp(rbx, Operand(kInterpreterDispatchTableRegister, rbx,
                       times_pointer_size, 0));
  // TODO(rmcilroy): Make dispatch table point to code entries to avoid
  // untagging and header removal.
  __ addp(rbx, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ call(rbx);
}
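
// Editorial sketch (not in the original source): the dispatch sequence above
// computes, in effect,
//
//   handler = dispatch_table[bytecode_array[bytecode_offset]];
//   goto handler_code + Code::kHeaderSize - kHeapObjectTag;
//
// i.e. it indexes the table by the first bytecode and jumps to the handler's
// first instruction, matching the TODO about pointing the table at code
// entries directly.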
void Builtins::Generate_InterpreterExitTrampoline(MacroAssembler* masm) {
  // TODO(rmcilroy): List of things not currently dealt with here but done in
  // fullcodegen's EmitReturnSequence.
  //  - Supporting FLAG_trace for Runtime::TraceExit.
  //  - Support profiler (specifically decrementing profiling_counter
  //    appropriately and calling out to HandleInterrupts if necessary).

  // The return value is in accumulator, which is already in rax.

  // Leave the frame (also dropping the register file).
  __ leave();

  // Drop receiver + arguments and return.
  __ movl(rbx, FieldOperand(kInterpreterBytecodeArrayRegister,
                            BytecodeArray::kParameterSizeOffset));
  __ PopReturnAddressTo(rcx);
  __ addp(rsp, rbx);
  __ PushReturnAddressFrom(rcx);
  __ ret(0);
}
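
// Editorial note (not in the original source): kParameterSizeOffset holds the
// parameter area size in bytes (receiver included), so adding it to rsp after
// popping the return address drops exactly the caller-pushed slots before the
// empty ret.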
void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kCompileLazy);
  GenerateTailCallToReturnedCode(masm);
}


static void CallCompileOptimized(MacroAssembler* masm,
                                 bool concurrent) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the function onto the stack.
  __ Push(rdi);
  // Function is also the parameter to the runtime call.
  __ Push(rdi);
  // Whether to compile in a background thread.
  __ Push(masm->isolate()->factory()->ToBoolean(concurrent));

  __ CallRuntime(Runtime::kCompileOptimized, 2);
  __ Pop(rdi);
}


void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  CallCompileOptimized(masm, false);
  GenerateTailCallToReturnedCode(masm);
}


void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  CallCompileOptimized(masm, true);
  GenerateTailCallToReturnedCode(masm);
}
static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // Re-execute the code that was patched back to the young age when
  // the stub returns.
  __ subp(Operand(rsp, 0), Immediate(5));
  __ Pushad();
  __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate()));
  __ movp(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize));
  {  // NOLINT
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(2);
    __ CallCFunction(
        ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  }
  __ Popad();
  __ ret(0);
}


#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                  \
  void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }                                                           \
  void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(  \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
  // that make_code_young doesn't do any garbage collection which allows us to
  // save/restore the registers without worrying about which of them contain
  // pointers.
  __ Pushad();
  __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate()));
  __ movp(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize));
  __ subp(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength));
  {  // NOLINT
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(2);
    __ CallCFunction(
        ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
        2);
  }
  __ Popad();

  // Perform prologue operations usually performed by the young code stub.
  __ PopReturnAddressTo(kScratchRegister);
  __ pushq(rbp);  // Caller's frame pointer.
  __ movp(rbp, rsp);
  __ Push(rsi);  // Callee's context.
  __ Push(rdi);  // Callee's JS Function.
  __ PushReturnAddressFrom(kScratchRegister);

  // Jump to point after the code-age stub.
  __ ret(0);
}


void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}


void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
  Generate_MarkCodeAsExecutedOnce(masm);
}
static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification, this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    __ Pushad();
    __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles);
    __ Popad();
    // Tear down internal frame.
  }

  __ DropUnderReturnAddress(1);  // Ignore state offset
  __ ret(0);  // Return to IC Miss stub, continuation still on stack.
}


void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}


void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}
static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Pass the deoptimization type to the runtime system.
    __ Push(Smi::FromInt(static_cast<int>(type)));

    __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
    // Tear down internal frame.
  }

  // Get the full codegen state from the stack and untag it.
  __ SmiToInteger32(kScratchRegister, Operand(rsp, kPCOnStackSize));

  // Switch on the state.
  Label not_no_registers, not_tos_rax;
  __ cmpp(kScratchRegister, Immediate(FullCodeGenerator::NO_REGISTERS));
  __ j(not_equal, &not_no_registers, Label::kNear);
  __ ret(1 * kPointerSize);  // Remove state.

  __ bind(&not_no_registers);
  __ movp(rax, Operand(rsp, kPCOnStackSize + kPointerSize));
  __ cmpp(kScratchRegister, Immediate(FullCodeGenerator::TOS_REG));
  __ j(not_equal, &not_tos_rax, Label::kNear);
  __ ret(2 * kPointerSize);  // Remove state, rax.

  __ bind(&not_tos_rax);
  __ Abort(kNoCasesLeft);
}


void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}
void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  // Stack Layout:
  //   rsp[0]           : Return address
  //   rsp[8]           : Argument n
  //   rsp[16]          : Argument n-1
  //   ...
  //   rsp[8 * n]       : Argument 1
  //   rsp[8 * (n + 1)] : Receiver (function to call)
  //
  // rax contains the number of arguments, n, not counting the receiver.
  //
  // 1. Make sure we have at least one argument.
  { Label done;
    __ testp(rax, rax);
    __ j(not_zero, &done);
    __ PopReturnAddressTo(rbx);
    __ Push(masm->isolate()->factory()->undefined_value());
    __ PushReturnAddressFrom(rbx);
    __ incp(rax);
    __ bind(&done);
  }
  // 2. Get the function to call (passed as receiver) from the stack, check
  //    if it is a function.
  Label slow, non_function;
  StackArgumentsAccessor args(rsp, rax);
  __ movp(rdi, args.GetReceiverOperand());
  __ JumpIfSmi(rdi, &non_function);
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
  __ j(not_equal, &slow);

  // 3a. Patch the first argument if necessary when calling a function.
  Label shift_arguments;
  __ Set(rdx, 0);  // indicate regular JS_FUNCTION
  { Label convert_to_object, use_global_proxy, patch_receiver;
    // Change context eagerly in case we need the global receiver.
    __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

    // Do not transform the receiver for strict mode functions.
    __ movp(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ testb(FieldOperand(rbx, SharedFunctionInfo::kStrictModeByteOffset),
             Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
    __ j(not_equal, &shift_arguments);

    // Do not transform the receiver for natives.
    // SharedFunctionInfo is already loaded into rbx.
    __ testb(FieldOperand(rbx, SharedFunctionInfo::kNativeByteOffset),
             Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
    __ j(not_zero, &shift_arguments);

    // Compute the receiver in sloppy mode.
    __ movp(rbx, args.GetArgumentOperand(1));
    __ JumpIfSmi(rbx, &convert_to_object, Label::kNear);

    __ CompareRoot(rbx, Heap::kNullValueRootIndex);
    __ j(equal, &use_global_proxy);
    __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
    __ j(equal, &use_global_proxy);

    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, rcx);
    __ j(above_equal, &shift_arguments);
    __ bind(&convert_to_object);
    {
      // Enter an internal frame in order to preserve argument count.
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Integer32ToSmi(rax, rax);
      __ Push(rax);

      __ movp(rax, rbx);
      ToObjectStub stub(masm->isolate());
      __ CallStub(&stub);
      __ movp(rbx, rax);
      __ Set(rdx, 0);  // indicate regular JS_FUNCTION

      __ Pop(rax);
      __ SmiToInteger32(rax, rax);
    }

    // Restore the function to rdi.
    __ movp(rdi, args.GetReceiverOperand());
    __ jmp(&patch_receiver, Label::kNear);

    __ bind(&use_global_proxy);
    __ movp(rbx,
            Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
    __ movp(rbx, FieldOperand(rbx, GlobalObject::kGlobalProxyOffset));

    __ bind(&patch_receiver);
    __ movp(args.GetArgumentOperand(1), rbx);

    __ jmp(&shift_arguments);
  }
  // 3b. Check for function proxy.
  __ bind(&slow);
  __ Set(rdx, 1);  // indicate function proxy
  __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE);
  __ j(equal, &shift_arguments);
  __ bind(&non_function);
  __ Set(rdx, 2);  // indicate non-function

  // 3c. Patch the first argument when calling a non-function. The
  //     CALL_NON_FUNCTION builtin expects the non-function callee as
  //     receiver, so overwrite the first argument which will ultimately
  //     become the receiver.
  __ movp(args.GetArgumentOperand(1), rdi);

  // 4. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver). Adjust argument count to make
  //    the original first argument the new receiver.
  __ bind(&shift_arguments);
  { Label loop;
    __ movp(rcx, rax);
    StackArgumentsAccessor args(rsp, rcx);
    __ bind(&loop);
    __ movp(rbx, args.GetArgumentOperand(1));
    __ movp(args.GetArgumentOperand(0), rbx);
    __ decp(rcx);
    __ j(not_zero, &loop);  // While non-zero.
    __ DropUnderReturnAddress(1, rbx);  // Drop one slot under return address.
    __ decp(rax);  // One fewer argument (first argument is new receiver).
  }
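
  // Editorial note (not in the original source): after the loop, every slot
  // from the return address up through the arguments has moved one position
  // toward the old receiver slot, so the original first argument now sits in
  // the receiver position; DropUnderReturnAddress reclaims the freed slot and
  // rax is decremented to match.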
  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
  //     or a function proxy via CALL_FUNCTION_PROXY.
  { Label function, non_proxy;
    __ testp(rdx, rdx);
    __ j(zero, &function);
    __ Set(rbx, 0);
    __ cmpp(rdx, Immediate(1));
    __ j(not_equal, &non_proxy);

    __ PopReturnAddressTo(rdx);
    __ Push(rdi);  // re-add proxy object as additional argument
    __ PushReturnAddressFrom(rdx);
    __ incp(rax);
    __ GetBuiltinEntry(rdx, Context::CALL_FUNCTION_PROXY_BUILTIN_INDEX);
    __ jmp(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
           RelocInfo::CODE_TARGET);

    __ bind(&non_proxy);
    __ GetBuiltinEntry(rdx, Context::CALL_NON_FUNCTION_BUILTIN_INDEX);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
    __ bind(&function);
  }

  // 5b. Get the code to call from the function and check that the number of
  //     expected arguments matches what we're providing. If so, jump
  //     (tail-call) to the code in register rdx without checking arguments.
  __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ LoadSharedFunctionInfoSpecialField(rbx, rdx,
      SharedFunctionInfo::kFormalParameterCountOffset);
  __ movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
  __ cmpp(rax, rbx);
  __ j(not_equal,
       masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
       RelocInfo::CODE_TARGET);

  ParameterCount expected(0);
  __ InvokeCode(rdx, expected, expected, JUMP_FUNCTION, NullCallWrapper());
}
static void Generate_PushAppliedArguments(MacroAssembler* masm,
                                          const int argumentsOffset,
                                          const int indexOffset,
                                          const int limitOffset) {
  Register receiver = LoadDescriptor::ReceiverRegister();
  Register key = LoadDescriptor::NameRegister();
  Register slot = LoadDescriptor::SlotRegister();
  Register vector = LoadWithVectorDescriptor::VectorRegister();

  // Copy all arguments from the array to the stack.
  Label entry, loop;
  __ movp(key, Operand(rbp, indexOffset));
  __ jmp(&entry);
  __ bind(&loop);
  __ movp(receiver, Operand(rbp, argumentsOffset));  // load arguments

  // Use inline caching to speed up access to arguments.
  Code::Kind kinds[] = {Code::KEYED_LOAD_IC};
  FeedbackVectorSpec spec(0, 1, kinds);
  Handle<TypeFeedbackVector> feedback_vector =
      masm->isolate()->factory()->NewTypeFeedbackVector(&spec);
  int index = feedback_vector->GetIndex(FeedbackVectorICSlot(0));
  __ Move(slot, Smi::FromInt(index));
  __ Move(vector, feedback_vector);
  Handle<Code> ic =
      KeyedLoadICStub(masm->isolate(), LoadICState(kNoExtraICState)).GetCode();
  __ Call(ic, RelocInfo::CODE_TARGET);
  // It is important that we do not have a test instruction after the
  // call. A test instruction after the call is used to indicate that
  // we have generated an inline version of the keyed load. In this
  // case, we know that we are not generating a test instruction next.

  // Push the nth argument.
  __ Push(rax);

  // Update the index on the stack and in register key.
  __ movp(key, Operand(rbp, indexOffset));
  __ SmiAddConstant(key, key, Smi::FromInt(1));
  __ movp(Operand(rbp, indexOffset), key);

  __ bind(&entry);
  __ cmpp(key, Operand(rbp, limitOffset));
  __ j(not_equal, &loop);

  // On exit, the pushed arguments count is in rax, untagged
  __ SmiToInteger64(rax, key);
}
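
// Editorial sketch (not in the original source): the loop above is the
// assembly analogue of
//
//   for (i = 0; i != limit; i++) push(arguments_array[i]);
//
// with the index kept as a smi in a frame slot so the KEYED_LOAD_IC call
// (which clobbers registers) cannot lose it, and with each element load going
// through the IC so repeated applies stay fast.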
// Used by FunctionApply and ReflectApply
static void Generate_ApplyHelper(MacroAssembler* masm, bool targetIsArgument) {
  const int kFormalParameters = targetIsArgument ? 3 : 2;
  const int kStackSize = kFormalParameters + 1;

  // Stack at entry:
  // rsp     : return address
  // rsp[8]  : arguments
  // rsp[16] : receiver ("this")
  // rsp[24] : function
  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);
    // Stack frame:
    // rbp     : Old base pointer
    // rbp[8]  : return address
    // rbp[16] : function arguments
    // rbp[24] : receiver
    // rbp[32] : function
    static const int kArgumentsOffset = kFPOnStackSize + kPCOnStackSize;
    static const int kReceiverOffset = kArgumentsOffset + kPointerSize;
    static const int kFunctionOffset = kReceiverOffset + kPointerSize;

    __ Push(Operand(rbp, kFunctionOffset));
    __ Push(Operand(rbp, kArgumentsOffset));
    if (targetIsArgument) {
      __ InvokeBuiltin(Context::REFLECT_APPLY_PREPARE_BUILTIN_INDEX,
                       CALL_FUNCTION);
    } else {
      __ InvokeBuiltin(Context::APPLY_PREPARE_BUILTIN_INDEX, CALL_FUNCTION);
    }

    Generate_CheckStackOverflow(masm, kFunctionOffset, kRaxIsSmiTagged);

    // Push current index and limit.
    const int kLimitOffset =
        StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize;
    const int kIndexOffset = kLimitOffset - 1 * kPointerSize;
    __ Push(rax);  // limit
    __ Push(Immediate(0));  // index

    // Get the receiver.
    __ movp(rbx, Operand(rbp, kReceiverOffset));
1261 // Get the receiver.
1262 __ movp(rbx, Operand(rbp, kReceiverOffset));
1264 // Check that the function is a JS function (otherwise it must be a proxy).
1265 Label push_receiver;
1266 __ movp(rdi, Operand(rbp, kFunctionOffset));
1267 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
1268 __ j(not_equal, &push_receiver);
1270 // Change context eagerly to get the right global object if necessary.
1271 __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
1273 // Do not transform the receiver for strict mode functions.
1274 Label call_to_object, use_global_proxy;
1275 __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
1276 __ testb(FieldOperand(rdx, SharedFunctionInfo::kStrictModeByteOffset),
1277 Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
1278 __ j(not_equal, &push_receiver);
1280 // Do not transform the receiver for natives.
1281 __ testb(FieldOperand(rdx, SharedFunctionInfo::kNativeByteOffset),
1282 Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
1283 __ j(not_equal, &push_receiver);
1285 // Compute the receiver in sloppy mode.
1286 __ JumpIfSmi(rbx, &call_to_object, Label::kNear);
1287 __ CompareRoot(rbx, Heap::kNullValueRootIndex);
1288 __ j(equal, &use_global_proxy);
1289 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
1290 __ j(equal, &use_global_proxy);
1292 // If given receiver is already a JavaScript object then there's no
1293 // reason for converting it.
1294 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
1295 __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, rcx);
1296 __ j(above_equal, &push_receiver);
1298 // Convert the receiver to an object.
1299 __ bind(&call_to_object);
1301 ToObjectStub stub(masm->isolate());
1304 __ jmp(&push_receiver, Label::kNear);
1306 __ bind(&use_global_proxy);
1308 Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
1309 __ movp(rbx, FieldOperand(rbx, GlobalObject::kGlobalProxyOffset));
    // Push the receiver.
    __ bind(&push_receiver);
    __ Push(rbx);

    // Loop over the arguments array, pushing each value to the stack
    Generate_PushAppliedArguments(
        masm, kArgumentsOffset, kIndexOffset, kLimitOffset);

    // Call the function.
    Label call_proxy;
    ParameterCount actual(rax);
    __ movp(rdi, Operand(rbp, kFunctionOffset));
    __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
    __ j(not_equal, &call_proxy);
    __ InvokeFunction(rdi, actual, CALL_FUNCTION, NullCallWrapper());

    frame_scope.GenerateLeaveFrame();
    __ ret(kStackSize * kPointerSize);  // remove this, receiver, and arguments

    // Call the function proxy.
    __ bind(&call_proxy);
    __ Push(rdi);  // add function proxy as last argument
    __ incp(rax);
    __ Set(rbx, 0);
    __ GetBuiltinEntry(rdx, Context::CALL_FUNCTION_PROXY_BUILTIN_INDEX);
    __ call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    // Leave internal frame.
  }
  __ ret(kStackSize * kPointerSize);  // remove this, receiver, and arguments
}
// Used by ReflectConstruct
static void Generate_ConstructHelper(MacroAssembler* masm) {
  const int kFormalParameters = 3;
  const int kStackSize = kFormalParameters + 1;

  // Stack at entry:
  // rsp     : return address
  // rsp[8]  : original constructor (new.target)
  // rsp[16] : arguments
  // rsp[24] : constructor
  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);
    // Stack frame:
    // rbp     : Old base pointer
    // rbp[8]  : return address
    // rbp[16] : original constructor (new.target)
    // rbp[24] : arguments
    // rbp[32] : constructor
    static const int kNewTargetOffset = kFPOnStackSize + kPCOnStackSize;
    static const int kArgumentsOffset = kNewTargetOffset + kPointerSize;
    static const int kFunctionOffset = kArgumentsOffset + kPointerSize;

    // If newTarget is not supplied, set it to constructor
    Label validate_arguments;
    __ movp(rax, Operand(rbp, kNewTargetOffset));
    __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
    __ j(not_equal, &validate_arguments, Label::kNear);
    __ movp(rax, Operand(rbp, kFunctionOffset));
    __ movp(Operand(rbp, kNewTargetOffset), rax);

    // Validate arguments
    __ bind(&validate_arguments);
    __ Push(Operand(rbp, kFunctionOffset));
    __ Push(Operand(rbp, kArgumentsOffset));
    __ Push(Operand(rbp, kNewTargetOffset));
    __ InvokeBuiltin(Context::REFLECT_CONSTRUCT_PREPARE_BUILTIN_INDEX,
                     CALL_FUNCTION);

    Generate_CheckStackOverflow(masm, kFunctionOffset, kRaxIsSmiTagged);

    // Push current index and limit.
    const int kLimitOffset =
        StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize;
    const int kIndexOffset = kLimitOffset - 1 * kPointerSize;
    __ Push(rax);  // limit
    __ Push(Immediate(0));  // index
    // Push the constructor function as callee.
    __ Push(Operand(rbp, kFunctionOffset));

    // Loop over the arguments array, pushing each value to the stack
    Generate_PushAppliedArguments(
        masm, kArgumentsOffset, kIndexOffset, kLimitOffset);

    // Use undefined feedback vector
    __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
    __ movp(rdi, Operand(rbp, kFunctionOffset));
    __ movp(rcx, Operand(rbp, kNewTargetOffset));

    // Call the function.
    CallConstructStub stub(masm->isolate(), SUPER_CONSTRUCTOR_CALL);
    __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);

    // Leave internal frame.
  }
  // remove this, target, arguments and newTarget
  __ ret(kStackSize * kPointerSize);
}
void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  Generate_ApplyHelper(masm, false);
}


void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  Generate_ApplyHelper(masm, true);
}


void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  Generate_ConstructHelper(masm);
}
void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_array_code;

  // Get the InternalArray function.
  __ LoadGlobalFunction(Context::INTERNAL_ARRAY_FUNCTION_INDEX, rdi);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi, kUnexpectedInitialMapForInternalArrayFunction);
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  // tail call a stub
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}
void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_array_code;

  // Get the Array function.
  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rdi);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
    __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(rbx, MAP_TYPE, rcx);
    __ Check(equal, kUnexpectedInitialMapForArrayFunction);
  }

  // Run the native code for the Array function called as a normal function.
  // tail call a stub
  __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}
void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax                 : number of arguments
  //  -- rdi                 : constructor function
  //  -- rsp[0]              : return address
  //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_ctor_calls(), 1);

  if (FLAG_debug_code) {
    __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, rcx);
    __ cmpp(rdi, rcx);
    __ Assert(equal, kUnexpectedStringFunction);
  }

  // Load the first argument into rax and get rid of the rest
  // (including the receiver).
  StackArgumentsAccessor args(rsp, rax);
  Label no_arguments;
  __ testp(rax, rax);
  __ j(zero, &no_arguments);
  __ movp(rbx, args.GetArgumentOperand(1));
  __ PopReturnAddressTo(rcx);
  __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
  __ PushReturnAddressFrom(rcx);
  __ movp(rax, rbx);

  // Lookup the argument in the number to string cache.
  Label not_cached, argument_is_string;
  __ LookupNumberStringCache(rax,  // Input.
                             rbx,  // Result.
                             rcx,  // Scratch 1.
                             rdx,  // Scratch 2.
                             &not_cached);
  __ IncrementCounter(counters->string_ctor_cached_number(), 1);
  __ bind(&argument_is_string);
  // ----------- S t a t e -------------
  //  -- rbx    : argument converted to string
  //  -- rdi    : constructor function
  //  -- rsp[0] : return address
  // -----------------------------------

  // Allocate a JSValue and put the tagged pointer into rax.
  Label gc_required;
  __ Allocate(JSValue::kSize,
              rax,  // Result.
              rcx,  // New allocation top (we ignore it).
              no_reg,
              &gc_required,
              TAG_OBJECT);

  // Set the map.
  __ LoadGlobalFunctionInitialMap(rdi, rcx);
  if (FLAG_debug_code) {
    __ cmpb(FieldOperand(rcx, Map::kInstanceSizeOffset),
            Immediate(JSValue::kSize >> kPointerSizeLog2));
    __ Assert(equal, kUnexpectedStringWrapperInstanceSize);
    __ cmpb(FieldOperand(rcx, Map::kUnusedPropertyFieldsOffset), Immediate(0));
    __ Assert(equal, kUnexpectedUnusedPropertiesOfStringWrapper);
  }
  __ movp(FieldOperand(rax, HeapObject::kMapOffset), rcx);

  // Set properties and elements.
  __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
  __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), rcx);
  __ movp(FieldOperand(rax, JSObject::kElementsOffset), rcx);

  // Store the value.
  __ movp(FieldOperand(rax, JSValue::kValueOffset), rbx);

  // Ensure the object is fully initialized.
  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
  // We're done. Return.
  __ ret(0);

  // The argument was not found in the number to string cache. Check
  // if it's a string already before calling the conversion builtin.
  Label convert_argument;
  __ bind(&not_cached);
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfSmi(rax, &convert_argument);
  Condition is_string = masm->IsObjectStringType(rax, rbx, rcx);
  __ j(NegateCondition(is_string), &convert_argument);
  __ movp(rbx, rax);
  __ IncrementCounter(counters->string_ctor_string_value(), 1);
  __ jmp(&argument_is_string);

  // Invoke the conversion builtin and put the result into rbx.
  __ bind(&convert_argument);
  __ IncrementCounter(counters->string_ctor_conversions(), 1);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(rdi);  // Preserve the function.
    ToStringStub stub(masm->isolate());
    __ CallStub(&stub);
    __ Pop(rdi);
  }
  __ movp(rbx, rax);
  __ jmp(&argument_is_string);

  // Load the empty string into rbx, remove the receiver from the
  // stack, and jump back to the case where the argument is a string.
  __ bind(&no_arguments);
  __ LoadRoot(rbx, Heap::kempty_stringRootIndex);
  __ PopReturnAddressTo(rcx);
  __ leap(rsp, Operand(rsp, kPointerSize));
  __ PushReturnAddressFrom(rcx);
  __ jmp(&argument_is_string);
  // At this point the argument is already a string. Call runtime to
  // create a string wrapper.
  __ bind(&gc_required);
  __ IncrementCounter(counters->string_ctor_gc_required(), 1);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(rbx);
    __ CallRuntime(Runtime::kNewStringWrapper, 1);
  }
  __ ret(0);
}
static void ArgumentsAdaptorStackCheck(MacroAssembler* masm,
                                       Label* stack_overflow) {
  // ----------- S t a t e -------------
  //  -- rax : actual number of arguments
  //  -- rbx : expected number of arguments
  //  -- rdi : function (passed through to callee)
  // -----------------------------------
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  __ LoadRoot(rdx, Heap::kRealStackLimitRootIndex);
  __ movp(rcx, rsp);
  // Make rcx the space we have left. The stack might already be overflowed
  // here which will cause rcx to become negative.
  __ subp(rcx, rdx);
  // Make rdx the space we need for the array when it is unrolled onto the
  // stack.
  __ movp(rdx, rbx);
  __ shlp(rdx, Immediate(kPointerSizeLog2));
  // Check if the arguments will overflow the stack.
  __ cmpp(rcx, rdx);
  __ j(less_equal, stack_overflow);  // Signed comparison.
}
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ pushq(rbp);
  __ movp(rbp, rsp);

  // Store the arguments adaptor context sentinel.
  __ Push(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));

  // Push the function on the stack.
  __ Push(rdi);

  // Preserve the number of arguments on the stack. Must preserve rax,
  // rbx and rcx because these registers are used when copying the
  // arguments and the receiver.
  __ Integer32ToSmi(r8, rax);
  __ Push(r8);
}


static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // Retrieve the number of arguments from the stack. Number is a Smi.
  __ movp(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));

  // Leave the frame.
  __ movp(rsp, rbp);
  __ popq(rbp);

  // Remove caller arguments from the stack.
  __ PopReturnAddressTo(rcx);
  SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
  __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
  __ PushReturnAddressFrom(rcx);
}
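
// Editorial note (not in the original source): the adaptor frame records the
// *actual* argument count as a smi, so the epilogue above pops
// actual_argc * kPointerSize bytes plus one receiver slot; everything the
// adaptor pushed inside the frame is discarded wholesale by movp(rsp, rbp).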
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax : actual number of arguments
  //  -- rbx : expected number of arguments
  //  -- rdi : function (passed through to callee)
  // -----------------------------------

  Label invoke, dont_adapt_arguments;
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->arguments_adaptors(), 1);

  Label stack_overflow;
  ArgumentsAdaptorStackCheck(masm, &stack_overflow);

  Label enough, too_few;
  __ movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
  __ cmpp(rax, rbx);
  __ j(less, &too_few);
  __ cmpp(rbx, Immediate(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ j(equal, &dont_adapt_arguments);
  {  // Enough parameters: Actual >= expected.
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);

    // Copy receiver and all expected arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ leap(rax, Operand(rbp, rax, times_pointer_size, offset));
    __ Set(r8, -1);  // account for receiver

    Label copy;
    __ bind(&copy);
    __ incp(r8);
    __ Push(Operand(rax, 0));
    __ subp(rax, Immediate(kPointerSize));
    __ cmpp(r8, rbx);
    __ j(less, &copy);
    __ jmp(&invoke);
  }
  {  // Too few parameters: Actual < expected.
    __ bind(&too_few);

    // If the function is strong we need to throw an error.
    Label no_strong_error;
    __ movp(kScratchRegister,
            FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
    __ testb(FieldOperand(kScratchRegister,
                          SharedFunctionInfo::kStrongModeByteOffset),
             Immediate(1 << SharedFunctionInfo::kStrongModeBitWithinByte));
    __ j(equal, &no_strong_error, Label::kNear);

    // What we really care about is the required number of arguments.

    if (kPointerSize == kInt32Size) {
      __ movl(
          kScratchRegister,
          FieldOperand(kScratchRegister, SharedFunctionInfo::kLengthOffset));
      __ SmiToInteger32(kScratchRegister, kScratchRegister);
    } else {
      // See comment near kLengthOffset in src/objects.h
      __ movsxlq(
          kScratchRegister,
          FieldOperand(kScratchRegister, SharedFunctionInfo::kLengthOffset));
      __ shrq(kScratchRegister, Immediate(1));
    }

    __ cmpp(rax, kScratchRegister);
    __ j(greater_equal, &no_strong_error, Label::kNear);

    {
      FrameScope frame(masm, StackFrame::MANUAL);
      EnterArgumentsAdaptorFrame(masm);
      __ CallRuntime(Runtime::kThrowStrongModeTooFewArguments, 0);
    }

    __ bind(&no_strong_error);
    EnterArgumentsAdaptorFrame(masm);
    // Copy receiver and all actual arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ leap(rdi, Operand(rbp, rax, times_pointer_size, offset));
    __ Set(r8, -1);  // account for receiver

    Label copy;
    __ bind(&copy);
    __ incp(r8);
    __ Push(Operand(rdi, 0));
    __ subp(rdi, Immediate(kPointerSize));
    __ cmpp(r8, rax);
    __ j(less, &copy);

    // Fill remaining expected arguments with undefined values.
    Label fill;
    __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
    __ bind(&fill);
    __ incp(r8);
    __ Push(kScratchRegister);
    __ cmpp(r8, rbx);
    __ j(less, &fill);

    // Restore function pointer.
    __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  }
  // Call the entry point.
  __ bind(&invoke);
  __ movp(rax, rbx);
  // rax : expected number of arguments
  // rdi : function (passed through to callee)
  __ call(rdx);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Leave frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ ret(0);

  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ jmp(rdx);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    EnterArgumentsAdaptorFrame(masm);
    __ InvokeBuiltin(Context::STACK_OVERFLOW_BUILTIN_INDEX, CALL_FUNCTION);
    __ int3();
  }
}
void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ Push(rax);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
  }

  Label skip;
  // If the code object is null, just return to the unoptimized code.
  __ cmpp(rax, Immediate(0));
  __ j(not_equal, &skip, Label::kNear);
  __ ret(0);

  __ bind(&skip);
  // Load deoptimization data from the code object.
  __ movp(rbx, Operand(rax, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  __ SmiToInteger32(rbx, Operand(rbx, FixedArray::OffsetOfElementAt(
      DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag));

  // Compute the target address = code_obj + header_size + osr_offset
  __ leap(rax, Operand(rax, rbx, times_1, Code::kHeaderSize - kHeapObjectTag));

  // Overwrite the return address on the stack.
  __ movq(StackOperandForReturnAddress(0), rax);

  // And "return" to the OSR entry point of the function.
  __ ret(0);
}
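
// Editorial sketch (not in the original source): the OSR entry computation
// above amounts to
//
//   target = code_obj - kHeapObjectTag + Code::kHeaderSize + osr_pc_offset;
//
// and since the runtime call left the unoptimized frame intact, overwriting
// the return address makes the final ret "resume" directly inside the newly
// compiled code.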
void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
  // We check the stack limit as indicator that recompilation might be done.
  Label ok;
  __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
  __ j(above_equal, &ok);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kStackGuard, 0);
  }
  __ jmp(masm->isolate()->builtins()->OnStackReplacement(),
         RelocInfo::CODE_TARGET);

  __ bind(&ok);
  __ ret(0);
}

#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_X64