1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
#if V8_TARGET_ARCH_X64

#include "src/code-factory.h"
8 #include "src/codegen.h"
9 #include "src/deoptimizer.h"
10 #include "src/full-codegen/full-codegen.h"
namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)
19 void Builtins::Generate_Adaptor(MacroAssembler* masm,
21 BuiltinExtraArguments extra_args) {
22 // ----------- S t a t e -------------
23 // -- rax : number of arguments excluding receiver
24 // -- rdi : called function (only guaranteed when
25 // extra_args requires it)
27 // -- rsp[0] : return address
28 // -- rsp[8] : last argument
30 // -- rsp[8 * argc] : first argument (argc == rax)
31 // -- rsp[8 * (argc + 1)] : receiver
32 // -----------------------------------
34 // Insert extra arguments.
35 int num_extra_args = 0;
if (extra_args == NEEDS_CALLED_FUNCTION) {
  num_extra_args = 1;
  __ PopReturnAddressTo(kScratchRegister);
  __ Push(rdi);
  __ PushReturnAddressFrom(kScratchRegister);
} else {
  DCHECK(extra_args == NO_EXTRA_ARGUMENTS);
}
45 // JumpToExternalReference expects rax to contain the number of arguments
46 // including the receiver and the extra arguments.
47 __ addp(rax, Immediate(num_extra_args + 1));
48 __ JumpToExternalReference(ExternalReference(id, masm->isolate()), 1);
52 static void CallRuntimePassFunction(
53 MacroAssembler* masm, Runtime::FunctionId function_id) {
54 FrameScope scope(masm, StackFrame::INTERNAL);
55 // Push a copy of the function onto the stack.
57 // Function is also the parameter to the runtime call.
60 __ CallRuntime(function_id, 1);
66 static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
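// Load the code object held by the function's SharedFunctionInfo and tail-call
// its first instruction, just past the Code object header.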
67 __ movp(kScratchRegister,
68 FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
69 __ movp(kScratchRegister,
70 FieldOperand(kScratchRegister, SharedFunctionInfo::kCodeOffset));
71 __ leap(kScratchRegister, FieldOperand(kScratchRegister, Code::kHeaderSize));
72 __ jmp(kScratchRegister);
76 static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
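// The preceding runtime call leaves a Code object in rax; jump past its header
// to the first instruction.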
__ leap(rax, FieldOperand(rax, Code::kHeaderSize));
__ jmp(rax);
82 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
83 // Checking whether the queued function is ready for install is optional,
84 // since we come across interrupts and stack checks elsewhere. However,
85 // not checking may delay installing ready functions, and always checking
86 // would be quite expensive. A good compromise is to first check against
87 // stack limit as a cue for an interrupt signal.
Label ok;
__ CompareRoot(rsp, Heap::kStackLimitRootIndex);
__ j(above_equal, &ok);

CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
GenerateTailCallToReturnedCode(masm);

__ bind(&ok);
GenerateTailCallToSharedCode(masm);
100 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
101 bool is_api_function,
102 bool create_memento) {
103 // ----------- S t a t e -------------
104 // -- rax: number of arguments
105 // -- rdi: constructor function
106 // -- rbx: allocation site or undefined
107 // -- rdx: original constructor
108 // -----------------------------------
110 // Should never create mementos for api functions.
111 DCHECK(!is_api_function || !create_memento);
113 // Enter a construct frame.
115 FrameScope scope(masm, StackFrame::CONSTRUCT);
117 // Preserve the incoming parameters on the stack.
118 __ AssertUndefinedOrAllocationSite(rbx);
120 __ Integer32ToSmi(rax, rax);
125 // Try to allocate the object without transitioning into C code. If any of
126 // the preconditions is not met, the code bails out to the runtime call.
127 Label rt_call, allocated;
128 if (FLAG_inline_new) {
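// If the debugger has set a step-in frame pointer, take the runtime path so
// that stepping into this constructor call is handled there.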
129 ExternalReference debug_step_in_fp =
130 ExternalReference::debug_step_in_fp_address(masm->isolate());
131 __ Move(kScratchRegister, debug_step_in_fp);
132 __ cmpp(Operand(kScratchRegister, 0), Immediate(0));
133 __ j(not_equal, &rt_call);
// Fall back to runtime if the original constructor and function differ.
__ cmpp(rdx, rdi);
__ j(not_equal, &rt_call);
139 // Verified that the constructor is a JSFunction.
140 // Load the initial map and verify that it is in fact a map.
142 __ movp(rax, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
// A smi check catches both a NULL initial map and a Smi.
144 DCHECK(kSmiTag == 0);
145 __ JumpIfSmi(rax, &rt_call);
147 // rax: initial map (if proven valid below)
148 __ CmpObjectType(rax, MAP_TYPE, rbx);
149 __ j(not_equal, &rt_call);
151 // Check that the constructor is not constructing a JSFunction (see
152 // comments in Runtime_NewObject in runtime.cc). In which case the
153 // initial map's instance type would be JS_FUNCTION_TYPE.
156 __ CmpInstanceType(rax, JS_FUNCTION_TYPE);
157 __ j(equal, &rt_call);
158 if (!is_api_function) {
160 // The code below relies on these assumptions.
161 STATIC_ASSERT(Map::Counter::kShift + Map::Counter::kSize == 32);
162 // Check if slack tracking is enabled.
163 __ movl(rsi, FieldOperand(rax, Map::kBitField3Offset));
164 __ shrl(rsi, Immediate(Map::Counter::kShift));
165 __ cmpl(rsi, Immediate(Map::kSlackTrackingCounterEnd));
166 __ j(less, &allocate);
167 // Decrease generous allocation count.
168 __ subl(FieldOperand(rax, Map::kBitField3Offset),
169 Immediate(1 << Map::Counter::kShift));
171 __ cmpl(rsi, Immediate(Map::kSlackTrackingCounterEnd));
172 __ j(not_equal, &allocate);
178 __ Push(rdi); // constructor
179 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
184 __ movl(rsi, Immediate(Map::kSlackTrackingCounterEnd - 1));
189 // Now allocate the JSObject on the heap.
190 __ movzxbp(rdi, FieldOperand(rax, Map::kInstanceSizeOffset));
191 __ shlp(rdi, Immediate(kPointerSizeLog2));
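// rdi: size of the new object in bytes (the map stores the instance size in words).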
192 if (create_memento) {
193 __ addp(rdi, Immediate(AllocationMemento::kSize));
195 // rdi: size of new object
201 NO_ALLOCATION_FLAGS);
202 Factory* factory = masm->isolate()->factory();
203 // Allocated the JSObject, now initialize the fields.
205 // rbx: JSObject (not HeapObject tagged - the actual address).
206 // rdi: start of next object (including memento if create_memento)
207 __ movp(Operand(rbx, JSObject::kMapOffset), rax);
208 __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
209 __ movp(Operand(rbx, JSObject::kPropertiesOffset), rcx);
210 __ movp(Operand(rbx, JSObject::kElementsOffset), rcx);
211 // Set extra fields in the newly allocated object.
214 // rdi: start of next object (including memento if create_memento)
215 // rsi: slack tracking counter (non-API function case)
216 __ leap(rcx, Operand(rbx, JSObject::kHeaderSize));
217 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
218 if (!is_api_function) {
219 Label no_inobject_slack_tracking;
221 // Check if slack tracking is enabled.
222 __ cmpl(rsi, Immediate(Map::kSlackTrackingCounterEnd));
223 __ j(less, &no_inobject_slack_tracking);
// Allocate object with a slack.
__ movzxbp(rsi, FieldOperand(
    rax, Map::kInObjectPropertiesOrConstructorFunctionIndexOffset));
__ movzxbp(rax, FieldOperand(rax, Map::kUnusedPropertyFieldsOffset));
__ subp(rsi, rax);
__ leap(rsi,
        Operand(rbx, rsi, times_pointer_size, JSObject::kHeaderSize));
234 // rsi: offset of first field after pre-allocated fields
if (FLAG_debug_code) {
  __ cmpp(rsi, rdi);
  __ Assert(less_equal,
            kUnexpectedNumberOfPreAllocatedPropertyFields);
}
240 __ InitializeFieldsWithFiller(rcx, rsi, rdx);
241 __ LoadRoot(rdx, Heap::kOnePointerFillerMapRootIndex);
242 // Fill the remaining fields with one pointer filler map.
244 __ bind(&no_inobject_slack_tracking);
246 if (create_memento) {
247 __ leap(rsi, Operand(rdi, -AllocationMemento::kSize));
248 __ InitializeFieldsWithFiller(rcx, rsi, rdx);
250 // Fill in memento fields if necessary.
251 // rsi: points to the allocated but uninitialized memento.
252 __ Move(Operand(rsi, AllocationMemento::kMapOffset),
253 factory->allocation_memento_map());
254 // Get the cell or undefined.
255 __ movp(rdx, Operand(rsp, 3 * kPointerSize));
256 __ AssertUndefinedOrAllocationSite(rdx);
257 __ movp(Operand(rsi, AllocationMemento::kAllocationSiteOffset), rdx);
} else {
  __ InitializeFieldsWithFiller(rcx, rdi, rdx);
}
262 // Add the object tag to make the JSObject real, so that we can continue
263 // and jump into the continuation code at any time from now on.
264 // rbx: JSObject (untagged)
265 __ orp(rbx, Immediate(kHeapObjectTag));
267 // Continue with JSObject being successfully allocated
268 // rbx: JSObject (tagged)
272 // Allocate the new receiver object using the runtime call.
273 // rdx: original constructor
275 int offset = kPointerSize;
276 if (create_memento) {
277 // Get the cell or allocation site.
278 __ movp(rdi, Operand(rsp, kPointerSize * 3));
279 __ Push(rdi); // argument 1: allocation site
280 offset += kPointerSize;
283 // Must restore rsi (context) and rdi (constructor) before calling runtime.
284 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
285 __ movp(rdi, Operand(rsp, offset));
286 __ Push(rdi); // argument 2/1: constructor function
287 __ Push(rdx); // argument 3/2: original constructor
288 if (create_memento) {
  __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 3);
} else {
  __ CallRuntime(Runtime::kNewObject, 2);
}
293 __ movp(rbx, rax); // store result in rbx
295 // Runtime_NewObjectWithAllocationSite increments allocation count.
296 // Skip the increment.
297 Label count_incremented;
298 if (create_memento) {
299 __ jmp(&count_incremented);
302 // New object allocated.
303 // rbx: newly allocated object
306 if (create_memento) {
307 __ movp(rcx, Operand(rsp, 3 * kPointerSize));
308 __ Cmp(rcx, masm->isolate()->factory()->undefined_value());
309 __ j(equal, &count_incremented);
310 // rcx is an AllocationSite. We are creating a memento from it, so we
// need to increment the memento create count.
__ SmiAddConstant(
    FieldOperand(rcx, AllocationSite::kPretenureCreateCountOffset),
    Smi::FromInt(1));
315 __ bind(&count_incremented);
318 // Restore the parameters.
322 // Retrieve smi-tagged arguments count from the stack.
323 __ movp(rax, Operand(rsp, 0));
324 __ SmiToInteger32(rax, rax);
326 // Push new.target onto the construct frame. This is stored just below the
// receiver on the stack.
__ Push(rdx);
// Push the allocated receiver to the stack. We need two copies
// because we may have to return the original one and the calling
// conventions dictate that the called function pops the receiver.
__ Push(rbx);
__ Push(rbx);
336 // Set up pointer to last argument.
337 __ leap(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));
339 // Copy arguments and receiver to the expression stack.
344 __ Push(Operand(rbx, rcx, times_pointer_size, 0));
347 __ j(greater_equal, &loop);
349 // Call the function.
350 if (is_api_function) {
351 __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
Handle<Code> code =
    masm->isolate()->builtins()->HandleApiCallConstruct();
354 __ Call(code, RelocInfo::CODE_TARGET);
} else {
  ParameterCount actual(rax);
  __ InvokeFunction(rdi, actual, CALL_FUNCTION, NullCallWrapper());
}
360 // Store offset of return address for deoptimizer.
361 if (!is_api_function) {
362 masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
365 // Restore context from the frame.
366 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
368 // If the result is an object (in the ECMA sense), we should get rid
369 // of the receiver and use the result; see ECMA-262 section 13.2.2-7
371 Label use_receiver, exit;
372 // If the result is a smi, it is *not* an object in the ECMA sense.
373 __ JumpIfSmi(rax, &use_receiver);
375 // If the type of the result (stored in its map) is less than
376 // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
377 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
378 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
379 __ j(above_equal, &exit);
381 // Throw away the result of the constructor invocation and use the
382 // on-stack receiver as the result.
383 __ bind(&use_receiver);
384 __ movp(rax, Operand(rsp, 0));
386 // Restore the arguments count and leave the construct frame. The arguments
// count is stored below the receiver and the new.target.
389 __ movp(rbx, Operand(rsp, 2 * kPointerSize));
391 // Leave construct frame.
394 // Remove caller arguments from the stack and return.
395 __ PopReturnAddressTo(rcx);
396 SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
397 __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
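// The extra kPointerSize also drops the receiver pushed by the caller.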
398 __ PushReturnAddressFrom(rcx);
399 Counters* counters = masm->isolate()->counters();
__ IncrementCounter(counters->constructed_objects(), 1);
__ ret(0);
405 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
406 Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
410 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
411 Generate_JSConstructStubHelper(masm, true, false);
415 void Builtins::Generate_JSConstructStubForDerived(MacroAssembler* masm) {
416 // ----------- S t a t e -------------
417 // -- rax: number of arguments
418 // -- rdi: constructor function
419 // -- rbx: allocation site or undefined
420 // -- rdx: original constructor
421 // -----------------------------------
424 FrameScope frame_scope(masm, StackFrame::CONSTRUCT);
426 // Preserve allocation site.
427 __ AssertUndefinedOrAllocationSite(rbx);
430 // Store a smi-tagged arguments count on the stack.
__ Integer32ToSmi(rax, rax);
__ Push(rax);
__ SmiToInteger32(rax, rax);
438 // receiver is the hole.
439 __ Push(masm->isolate()->factory()->the_hole_value());
441 // Set up pointer to last argument.
442 __ leap(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));
444 // Copy arguments and receiver to the expression stack.
449 __ Push(Operand(rbx, rcx, times_pointer_size, 0));
452 __ j(greater_equal, &loop);
456 ExternalReference debug_step_in_fp =
457 ExternalReference::debug_step_in_fp_address(masm->isolate());
458 __ Move(kScratchRegister, debug_step_in_fp);
459 __ cmpp(Operand(kScratchRegister, 0), Immediate(0));
460 __ j(equal, &skip_step_in);
465 __ CallRuntime(Runtime::kHandleStepInForDerivedConstructors, 1);
469 __ bind(&skip_step_in);
471 // Call the function.
472 ParameterCount actual(rax);
473 __ InvokeFunction(rdi, actual, CALL_FUNCTION, NullCallWrapper());
475 // Restore context from the frame.
476 __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
478 // Get arguments count, skipping over new.target.
479 __ movp(rbx, Operand(rsp, kPointerSize)); // Get arguments count.
480 } // Leave construct frame.
482 // Remove caller arguments from the stack and return.
483 __ PopReturnAddressTo(rcx);
484 SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
485 __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
__ PushReturnAddressFrom(rcx);
__ ret(0);
491 enum IsTagged { kRaxIsSmiTagged, kRaxIsUntaggedInt };
494 // Clobbers rcx, rdx, kScratchRegister; preserves all other registers.
495 static void Generate_CheckStackOverflow(MacroAssembler* masm,
496 const int calleeOffset,
497 IsTagged rax_is_tagged) {
498 // rax : the number of items to be pushed to the stack
500 // Check the stack for overflow. We are not trying to catch
501 // interruptions (e.g. debug break and preemption) here, so the "real stack
502 // limit" is checked.
Label okay;
__ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
__ movp(rcx, rsp);
// Make rcx the space we have left. The stack might already be overflowed
// here which will cause rcx to become negative.
__ subp(rcx, kScratchRegister);
// Make rdx the space we need for the array when it is unrolled onto the
// stack.
if (rax_is_tagged == kRaxIsSmiTagged) {
  __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2);
} else {
  DCHECK(rax_is_tagged == kRaxIsUntaggedInt);
  __ movp(rdx, rax);
  __ shlq(rdx, Immediate(kPointerSizeLog2));
}
// Check if the arguments will overflow the stack.
__ cmpp(rcx, rdx);
__ j(greater, &okay);  // Signed comparison.
// Out of stack space.
__ Push(Operand(rbp, calleeOffset));
if (rax_is_tagged == kRaxIsUntaggedInt) {
  __ Integer32ToSmi(rax, rax);
}
__ Push(rax);
__ InvokeBuiltin(Context::STACK_OVERFLOW_BUILTIN_INDEX, CALL_FUNCTION);

__ bind(&okay);
}
534 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
536 ProfileEntryHookStub::MaybeCallEntryHook(masm);
538 // Expects five C++ function parameters.
539 // - Address entry (ignored)
// - JSFunction* function
541 // - Object* receiver
544 // (see Handle::Invoke in execution.cc).
546 // Open a C++ scope for the FrameScope.
548 // Platform specific argument handling. After this, the stack contains
549 // an internal frame and the pushed function and receiver, and
// registers rax and rbx hold the argument count and argument array,
551 // while rdi holds the function pointer and rsi the context.
554 // MSVC parameters in:
555 // rcx : entry (ignored)
561 // Clear the context before we push it when entering the internal frame.
563 // Enter an internal frame.
564 FrameScope scope(masm, StackFrame::INTERNAL);
566 // Load the function context into rsi.
567 __ movp(rsi, FieldOperand(rdx, JSFunction::kContextOffset));
569 // Push the function and the receiver onto the stack.
573 // Load the number of arguments and setup pointer to the arguments.
575 // Load the previous frame pointer to access C argument on stack
576 __ movp(kScratchRegister, Operand(rbp, 0));
577 __ movp(rbx, Operand(kScratchRegister, EntryFrameConstants::kArgvOffset));
578 // Load the function pointer into rdi.
581 // GCC parameters in:
582 // rdi : entry (ignored)
591 // Clear the context before we push it when entering the internal frame.
593 // Enter an internal frame.
594 FrameScope scope(masm, StackFrame::INTERNAL);
596 // Push the function and receiver and setup the context.
599 __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
601 // Load the number of arguments and setup pointer to the arguments.
606 // Current stack contents:
607 // [rsp + 2 * kPointerSize ... ] : Internal frame
608 // [rsp + kPointerSize] : function
610 // Current register contents:
616 // Check if we have enough stack space to push all arguments.
617 // The function is the first thing that was pushed above after entering
618 // the internal frame.
619 const int kFunctionOffset =
620 InternalFrameConstants::kCodeOffset - kRegisterSize;
621 // Expects argument count in rax. Clobbers rcx, rdx.
622 Generate_CheckStackOverflow(masm, kFunctionOffset, kRaxIsUntaggedInt);
624 // Copy arguments to the stack in a loop.
625 // Register rbx points to array of pointers to handle locations.
626 // Push the values of these handles.
628 __ Set(rcx, 0); // Set loop variable to 0.
631 __ movp(kScratchRegister, Operand(rbx, rcx, times_pointer_size, 0));
632 __ Push(Operand(kScratchRegister, 0)); // dereference handle
633 __ addp(rcx, Immediate(1));
636 __ j(not_equal, &loop);
640 // No type feedback cell is available
641 __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
642 // Expects rdi to hold function pointer.
643 CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
646 ParameterCount actual(rax);
647 // Function must be in rdi.
648 __ InvokeFunction(rdi, actual, CALL_FUNCTION, NullCallWrapper());
650 // Exit the internal frame. Notice that this also removes the empty
651 // context and the function left on the stack by the code
655 // TODO(X64): Is argument correct? Is there a receiver to remove?
656 __ ret(1 * kPointerSize); // Remove receiver.
660 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
661 Generate_JSEntryTrampolineHelper(masm, false);
665 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
666 Generate_JSEntryTrampolineHelper(masm, true);
670 // Generate code for entering a JS function with the interpreter.
671 // On entry to the function the receiver and arguments have been pushed on the
672 // stack left to right. The actual argument count matches the formal parameter
673 // count expected by the function.
675 // The live registers are:
676 // o rdi: the JS function object being called
677 // o rsi: our context
678 // o rbp: the caller's frame pointer
679 // o rsp: stack pointer (pointing to return address)
681 // The function builds a JS frame. Please see JavaScriptFrameConstants in
682 // frames-x64.h for its layout.
683 // TODO(rmcilroy): We will need to include the current bytecode pointer in the
685 void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
686 // Open a frame scope to indicate that there is a frame on the stack. The
687 // MANUAL indicates that the scope shouldn't actually generate code to set up
688 // the frame (that is done below).
689 FrameScope frame_scope(masm, StackFrame::MANUAL);
690 __ pushq(rbp); // Caller's frame pointer.
692 __ Push(rsi); // Callee's context.
693 __ Push(rdi); // Callee's JS function.
// Get the bytecode array from the function object and load it into
// kInterpreterBytecodeArrayRegister.
697 __ movp(rax, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
698 __ movp(kInterpreterBytecodeArrayRegister,
699 FieldOperand(rax, SharedFunctionInfo::kFunctionDataOffset));
701 if (FLAG_debug_code) {
702 // Check function data field is actually a BytecodeArray object.
703 __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
__ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
                 rax);
__ Assert(equal, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
}
709 // Allocate the local and temporary register file on the stack.
711 // Load frame size from the BytecodeArray object.
712 __ movl(rcx, FieldOperand(kInterpreterBytecodeArrayRegister,
713 BytecodeArray::kFrameSizeOffset));
// Do a stack check to ensure we don't go over the limit.
Label ok;
__ movp(rdx, rsp);
__ subp(rdx, rcx);
__ CompareRoot(rdx, Heap::kRealStackLimitRootIndex);
__ j(above_equal, &ok, Label::kNear);
__ InvokeBuiltin(Context::STACK_OVERFLOW_BUILTIN_INDEX, CALL_FUNCTION);
__ bind(&ok);
// If ok, push undefined as the initial value for all register file entries.
Label loop_header, loop_check;
__ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
__ j(always, &loop_check);
729 __ bind(&loop_header);
// TODO(rmcilroy): Consider doing more than one push per loop iteration.
__ Push(rdx);
// Continue loop if not done.
733 __ bind(&loop_check);
734 __ subp(rcx, Immediate(kPointerSize));
735 __ j(greater_equal, &loop_header, Label::kNear);
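// All register file entries now hold the undefined value.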
738 // TODO(rmcilroy): List of things not currently dealt with here but done in
739 // fullcodegen's prologue:
740 // - Support profiler (specifically profiling_counter).
741 // - Call ProfileEntryHookStub when isolate has a function_entry_hook.
742 // - Allow simulator stop operations if FLAG_stop_at is set.
743 // - Deal with sloppy mode functions which need to replace the
744 // receiver with the global proxy when called as functions (without an
745 // explicit receiver object).
746 // - Code aging of the BytecodeArray object.
747 // - Supporting FLAG_trace.
749 // The following items are also not done here, and will probably be done using
750 // explicit bytecodes instead:
751 // - Allocating a new local context if applicable.
752 // - Setting up a local binding to the this function, which is used in
753 // derived constructors with super calls.
754 // - Setting new.target if required.
755 // - Dealing with REST parameters (only if
756 // https://codereview.chromium.org/1235153006 doesn't land by then).
757 // - Dealing with argument objects.
// Perform stack guard check.
{
  Label ok;
  __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
  __ j(above_equal, &ok, Label::kNear);
  __ CallRuntime(Runtime::kStackGuard, 0);
  __ bind(&ok);
}
// Load accumulator, register file, bytecode offset, dispatch table into
// registers.
770 __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
771 __ movp(kInterpreterRegisterFileRegister, rbp);
__ subp(kInterpreterRegisterFileRegister,
        Immediate(kPointerSize + StandardFrameConstants::kFixedFrameSizeFromFp));
775 __ movp(kInterpreterBytecodeOffsetRegister,
776 Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));
777 __ LoadRoot(kInterpreterDispatchTableRegister,
778 Heap::kInterpreterTableRootIndex);
779 __ addp(kInterpreterDispatchTableRegister,
780 Immediate(FixedArray::kHeaderSize - kHeapObjectTag));
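// kInterpreterDispatchTableRegister now points at the first handler entry of
// the dispatch table (past the FixedArray header).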
782 // Dispatch to the first bytecode handler for the function.
783 __ movzxbp(rbx, Operand(kInterpreterBytecodeArrayRegister,
784 kInterpreterBytecodeOffsetRegister, times_1, 0));
785 __ movp(rbx, Operand(kInterpreterDispatchTableRegister, rbx,
786 times_pointer_size, 0));
// TODO(rmcilroy): Make dispatch table point to code entries to avoid untagging
788 // and header removal.
__ addp(rbx, Immediate(Code::kHeaderSize - kHeapObjectTag));
__ call(rbx);
794 void Builtins::Generate_InterpreterExitTrampoline(MacroAssembler* masm) {
795 // TODO(rmcilroy): List of things not currently dealt with here but done in
796 // fullcodegen's EmitReturnSequence.
797 // - Supporting FLAG_trace for Runtime::TraceExit.
798 // - Support profiler (specifically decrementing profiling_counter
799 // appropriately and calling out to HandleInterrupts if necessary).
801 // The return value is in accumulator, which is already in rax.
803 // Leave the frame (also dropping the register file).
806 // Drop receiver + arguments and return.
807 __ movl(rbx, FieldOperand(kInterpreterBytecodeArrayRegister,
808 BytecodeArray::kParameterSizeOffset));
__ PopReturnAddressTo(rcx);
__ addp(rsp, rbx);
__ PushReturnAddressFrom(rcx);
__ ret(0);
816 void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
817 CallRuntimePassFunction(masm, Runtime::kCompileLazy);
818 GenerateTailCallToReturnedCode(masm);
static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) {
824 FrameScope scope(masm, StackFrame::INTERNAL);
825 // Push a copy of the function onto the stack.
827 // Function is also the parameter to the runtime call.
829 // Whether to compile in a background thread.
830 __ Push(masm->isolate()->factory()->ToBoolean(concurrent));
832 __ CallRuntime(Runtime::kCompileOptimized, 2);
838 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
839 CallCompileOptimized(masm, false);
840 GenerateTailCallToReturnedCode(masm);
844 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
845 CallCompileOptimized(masm, true);
846 GenerateTailCallToReturnedCode(masm);
850 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
851 // For now, we are relying on the fact that make_code_young doesn't do any
852 // garbage collection which allows us to save/restore the registers without
853 // worrying about which of them contain pointers. We also don't build an
854 // internal frame to make the code faster, since we shouldn't have to do stack
855 // crawls in MakeCodeYoung. This seems a bit fragile.
// Re-execute the code that was patched back to the young age when
// the stub returns.
859 __ subp(Operand(rsp, 0), Immediate(5));
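// Back up the return address by the length of the patched call sequence so
// that the rejuvenated code is re-executed when this stub returns.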
861 __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate()));
862 __ movp(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize));
864 FrameScope scope(masm, StackFrame::MANUAL);
__ PrepareCallCFunction(2);
__ CallCFunction(
    ExternalReference::get_make_code_young_function(masm->isolate()), 2);
874 #define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \
875 void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
876 MacroAssembler* masm) { \
  GenerateMakeCodeYoungAgainCommon(masm); \
} \
879 void Builtins::Generate_Make##C##CodeYoungAgainOddMarking( \
880 MacroAssembler* masm) { \
  GenerateMakeCodeYoungAgainCommon(masm); \
}
883 CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
884 #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
887 void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
888 // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
889 // that make_code_young doesn't do any garbage collection which allows us to
// save/restore the registers without worrying about which of them contain
// pointers.
893 __ Move(arg_reg_2, ExternalReference::isolate_address(masm->isolate()));
894 __ movp(arg_reg_1, Operand(rsp, kNumSafepointRegisters * kPointerSize));
895 __ subp(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength));
897 FrameScope scope(masm, StackFrame::MANUAL);
__ PrepareCallCFunction(2);
__ CallCFunction(
    ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
    2);
905 // Perform prologue operations usually performed by the young code stub.
906 __ PopReturnAddressTo(kScratchRegister);
907 __ pushq(rbp); // Caller's frame pointer.
909 __ Push(rsi); // Callee's context.
910 __ Push(rdi); // Callee's JS Function.
911 __ PushReturnAddressFrom(kScratchRegister);
// Jump to point after the code-age stub.
__ ret(0);
918 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
919 GenerateMakeCodeYoungAgainCommon(masm);
923 void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
924 Generate_MarkCodeAsExecutedOnce(masm);
928 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
929 SaveFPRegsMode save_doubles) {
930 // Enter an internal frame.
932 FrameScope scope(masm, StackFrame::INTERNAL);
934 // Preserve registers across notification, this is important for compiled
935 // stubs that tail call the runtime on deopts passing their parameters in
938 __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles);
940 // Tear down internal frame.
943 __ DropUnderReturnAddress(1); // Ignore state offset
944 __ ret(0); // Return to IC Miss stub, continuation still on stack.
948 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
949 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
953 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
954 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
958 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
959 Deoptimizer::BailoutType type) {
960 // Enter an internal frame.
962 FrameScope scope(masm, StackFrame::INTERNAL);
964 // Pass the deoptimization type to the runtime system.
965 __ Push(Smi::FromInt(static_cast<int>(type)));
967 __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
968 // Tear down internal frame.
971 // Get the full codegen state from the stack and untag it.
972 __ SmiToInteger32(kScratchRegister, Operand(rsp, kPCOnStackSize));
974 // Switch on the state.
975 Label not_no_registers, not_tos_rax;
976 __ cmpp(kScratchRegister, Immediate(FullCodeGenerator::NO_REGISTERS));
__ j(not_equal, &not_no_registers, Label::kNear);
978 __ ret(1 * kPointerSize); // Remove state.
__ bind(&not_no_registers);
981 __ movp(rax, Operand(rsp, kPCOnStackSize + kPointerSize));
982 __ cmpp(kScratchRegister, Immediate(FullCodeGenerator::TOS_REG));
__ j(not_equal, &not_tos_rax, Label::kNear);
984 __ ret(2 * kPointerSize); // Remove state, rax.
__ bind(&not_tos_rax);
987 __ Abort(kNoCasesLeft);
991 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
992 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
996 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
997 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
1001 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
1002 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
1007 void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
1009 // rsp[0] : Return address
1010 // rsp[8] : Argument n
1011 // rsp[16] : Argument n-1
1013 // rsp[8 * n] : Argument 1
1014 // rsp[8 * (n + 1)] : Receiver (callable to call)
1016 // rax contains the number of arguments, n, not counting the receiver.
// 1. Make sure we have at least one argument.
Label done;
__ testp(rax, rax);
__ j(not_zero, &done, Label::kNear);
__ PopReturnAddressTo(rbx);
__ PushRoot(Heap::kUndefinedValueRootIndex);
__ PushReturnAddressFrom(rbx);
__ incp(rax);
__ bind(&done);
1030 // 2. Get the callable to call (passed as receiver) from the stack.
1032 StackArgumentsAccessor args(rsp, rax);
1033 __ movp(rdi, args.GetReceiverOperand());
1036 // 3. Shift arguments and return address one slot down on the stack
1037 // (overwriting the original receiver). Adjust argument count to make
1038 // the original first argument the new receiver.
1042 StackArgumentsAccessor args(rsp, rcx);
1044 __ movp(rbx, args.GetArgumentOperand(1));
1045 __ movp(args.GetArgumentOperand(0), rbx);
1047 __ j(not_zero, &loop); // While non-zero.
1048 __ DropUnderReturnAddress(1, rbx); // Drop one slot under return address.
1049 __ decp(rax); // One fewer argument (first argument is new receiver).
1052 // 4. Call the callable.
1053 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1057 static void Generate_PushAppliedArguments(MacroAssembler* masm,
1058 const int argumentsOffset,
1059 const int indexOffset,
1060 const int limitOffset) {
1061 Register receiver = LoadDescriptor::ReceiverRegister();
1062 Register key = LoadDescriptor::NameRegister();
1063 Register slot = LoadDescriptor::SlotRegister();
1064 Register vector = LoadWithVectorDescriptor::VectorRegister();
1066 // Copy all arguments from the array to the stack.
1068 __ movp(key, Operand(rbp, indexOffset));
1071 __ movp(receiver, Operand(rbp, argumentsOffset)); // load arguments
1073 // Use inline caching to speed up access to arguments.
1074 Code::Kind kinds[] = {Code::KEYED_LOAD_IC};
1075 FeedbackVectorSpec spec(0, 1, kinds);
1076 Handle<TypeFeedbackVector> feedback_vector =
1077 masm->isolate()->factory()->NewTypeFeedbackVector(&spec);
1078 int index = feedback_vector->GetIndex(FeedbackVectorICSlot(0));
1079 __ Move(slot, Smi::FromInt(index));
1080 __ Move(vector, feedback_vector);
Handle<Code> ic =
    KeyedLoadICStub(masm->isolate(), LoadICState(kNoExtraICState)).GetCode();
1083 __ Call(ic, RelocInfo::CODE_TARGET);
1084 // It is important that we do not have a test instruction after the
1085 // call. A test instruction after the call is used to indicate that
1086 // we have generated an inline version of the keyed load. In this
1087 // case, we know that we are not generating a test instruction next.
// Push the nth argument.
__ Push(rax);
1092 // Update the index on the stack and in register key.
1093 __ movp(key, Operand(rbp, indexOffset));
1094 __ SmiAddConstant(key, key, Smi::FromInt(1));
1095 __ movp(Operand(rbp, indexOffset), key);
1098 __ cmpp(key, Operand(rbp, limitOffset));
1099 __ j(not_equal, &loop);
1101 // On exit, the pushed arguments count is in rax, untagged
1102 __ SmiToInteger64(rax, key);
1106 // Used by FunctionApply and ReflectApply
1107 static void Generate_ApplyHelper(MacroAssembler* masm, bool targetIsArgument) {
1108 const int kFormalParameters = targetIsArgument ? 3 : 2;
1109 const int kStackSize = kFormalParameters + 1;
1112 // rsp : return address
1113 // rsp[8] : arguments
1114 // rsp[16] : receiver ("this")
1115 // rsp[24] : function
1117 FrameScope frame_scope(masm, StackFrame::INTERNAL);
1119 // rbp : Old base pointer
1120 // rbp[8] : return address
1121 // rbp[16] : function arguments
1122 // rbp[24] : receiver
1123 // rbp[32] : function
1124 static const int kArgumentsOffset = kFPOnStackSize + kPCOnStackSize;
1125 static const int kReceiverOffset = kArgumentsOffset + kPointerSize;
1126 static const int kFunctionOffset = kReceiverOffset + kPointerSize;
1128 __ Push(Operand(rbp, kFunctionOffset));
1129 __ Push(Operand(rbp, kArgumentsOffset));
1130 if (targetIsArgument) {
1131 __ InvokeBuiltin(Context::REFLECT_APPLY_PREPARE_BUILTIN_INDEX,
1134 __ InvokeBuiltin(Context::APPLY_PREPARE_BUILTIN_INDEX, CALL_FUNCTION);
1137 Generate_CheckStackOverflow(masm, kFunctionOffset, kRaxIsSmiTagged);
1139 // Push current index and limit, and receiver.
1140 const int kLimitOffset =
1141 StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize;
1142 const int kIndexOffset = kLimitOffset - 1 * kPointerSize;
1143 __ Push(rax); // limit
1144 __ Push(Immediate(0)); // index
1145 __ Push(Operand(rbp, kReceiverOffset)); // receiver
1147 // Loop over the arguments array, pushing each value to the stack
1148 Generate_PushAppliedArguments(masm, kArgumentsOffset, kIndexOffset,
1151 // Call the callable.
1152 // TODO(bmeurer): This should be a tail call according to ES6.
1153 __ movp(rdi, Operand(rbp, kFunctionOffset));
1154 __ Call(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1156 // Leave internal frame.
1158 __ ret(kStackSize * kPointerSize); // remove this, receiver, and arguments
1162 // Used by ReflectConstruct
1163 static void Generate_ConstructHelper(MacroAssembler* masm) {
1164 const int kFormalParameters = 3;
1165 const int kStackSize = kFormalParameters + 1;
1168 // rsp : return address
1169 // rsp[8] : original constructor (new.target)
1170 // rsp[16] : arguments
1171 // rsp[24] : constructor
1173 FrameScope frame_scope(masm, StackFrame::INTERNAL);
1175 // rbp : Old base pointer
1176 // rbp[8] : return address
1177 // rbp[16] : original constructor (new.target)
1178 // rbp[24] : arguments
1179 // rbp[32] : constructor
1180 static const int kNewTargetOffset = kFPOnStackSize + kPCOnStackSize;
1181 static const int kArgumentsOffset = kNewTargetOffset + kPointerSize;
1182 static const int kFunctionOffset = kArgumentsOffset + kPointerSize;
1184 // If newTarget is not supplied, set it to constructor
1185 Label validate_arguments;
1186 __ movp(rax, Operand(rbp, kNewTargetOffset));
1187 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
1188 __ j(not_equal, &validate_arguments, Label::kNear);
1189 __ movp(rax, Operand(rbp, kFunctionOffset));
1190 __ movp(Operand(rbp, kNewTargetOffset), rax);
1192 // Validate arguments
1193 __ bind(&validate_arguments);
1194 __ Push(Operand(rbp, kFunctionOffset));
1195 __ Push(Operand(rbp, kArgumentsOffset));
1196 __ Push(Operand(rbp, kNewTargetOffset));
1197 __ InvokeBuiltin(Context::REFLECT_CONSTRUCT_PREPARE_BUILTIN_INDEX,
1200 Generate_CheckStackOverflow(masm, kFunctionOffset, kRaxIsSmiTagged);
1202 // Push current index and limit.
1203 const int kLimitOffset =
1204 StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize;
1205 const int kIndexOffset = kLimitOffset - 1 * kPointerSize;
1206 __ Push(rax); // limit
1207 __ Push(Immediate(0)); // index
1208 // Push the constructor function as callee.
1209 __ Push(Operand(rbp, kFunctionOffset));
1211 // Loop over the arguments array, pushing each value to the stack
1212 Generate_PushAppliedArguments(
1213 masm, kArgumentsOffset, kIndexOffset, kLimitOffset);
1215 // Use undefined feedback vector
1216 __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
1217 __ movp(rdi, Operand(rbp, kFunctionOffset));
1218 __ movp(rcx, Operand(rbp, kNewTargetOffset));
1220 // Call the function.
1221 CallConstructStub stub(masm->isolate(), SUPER_CONSTRUCTOR_CALL);
1222 __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
1224 // Leave internal frame.
1226 // remove this, target, arguments and newTarget
1227 __ ret(kStackSize * kPointerSize);
1231 void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
1232 Generate_ApplyHelper(masm, false);
1236 void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
1237 Generate_ApplyHelper(masm, true);
1241 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
1242 Generate_ConstructHelper(masm);
1246 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
1247 // ----------- S t a t e -------------
1249 // -- rsp[0] : return address
1250 // -- rsp[8] : last argument
1251 // -----------------------------------
1252 Label generic_array_code;
1254 // Get the InternalArray function.
1255 __ LoadGlobalFunction(Context::INTERNAL_ARRAY_FUNCTION_INDEX, rdi);
1257 if (FLAG_debug_code) {
1258 // Initial map for the builtin InternalArray functions should be maps.
1259 __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
1260 // Will both indicate a NULL and a Smi.
1261 STATIC_ASSERT(kSmiTag == 0);
1262 Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
1263 __ Check(not_smi, kUnexpectedInitialMapForInternalArrayFunction);
1264 __ CmpObjectType(rbx, MAP_TYPE, rcx);
1265 __ Check(equal, kUnexpectedInitialMapForInternalArrayFunction);
1268 // Run the native code for the InternalArray function called as a normal
1271 InternalArrayConstructorStub stub(masm->isolate());
1272 __ TailCallStub(&stub);
1276 void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
1277 // ----------- S t a t e -------------
1279 // -- rsp[0] : return address
1280 // -- rsp[8] : last argument
1281 // -----------------------------------
1282 Label generic_array_code;
1284 // Get the Array function.
1285 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rdi);
1287 if (FLAG_debug_code) {
1288 // Initial map for the builtin Array functions should be maps.
1289 __ movp(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
1290 // Will both indicate a NULL and a Smi.
1291 STATIC_ASSERT(kSmiTag == 0);
1292 Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
1293 __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
1294 __ CmpObjectType(rbx, MAP_TYPE, rcx);
1295 __ Check(equal, kUnexpectedInitialMapForArrayFunction);
1299 // Run the native code for the Array function called as a normal function.
1301 __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
1302 ArrayConstructorStub stub(masm->isolate());
1303 __ TailCallStub(&stub);
1307 void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
1308 // ----------- S t a t e -------------
1309 // -- rax : number of arguments
1310 // -- rdi : constructor function
1311 // -- rsp[0] : return address
1312 // -- rsp[(argc - n) * 8] : arg[n] (zero-based)
1313 // -- rsp[(argc + 1) * 8] : receiver
1314 // -----------------------------------
1315 Counters* counters = masm->isolate()->counters();
1316 __ IncrementCounter(counters->string_ctor_calls(), 1);
1318 if (FLAG_debug_code) {
1319 __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, rcx);
1321 __ Assert(equal, kUnexpectedStringFunction);
1324 // Load the first argument into rax and get rid of the rest
1325 // (including the receiver).
1326 StackArgumentsAccessor args(rsp, rax);
1329 __ j(zero, &no_arguments);
1330 __ movp(rbx, args.GetArgumentOperand(1));
1331 __ PopReturnAddressTo(rcx);
1332 __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
__ PushReturnAddressFrom(rcx);
__ movp(rax, rbx);
1336 // Lookup the argument in the number to string cache.
1337 Label not_cached, argument_is_string;
__ LookupNumberStringCache(rax, rbx, rcx, rdx, &not_cached);  // In, result, scratch1, scratch2.
1343 __ IncrementCounter(counters->string_ctor_cached_number(), 1);
1344 __ bind(&argument_is_string);
1346 // ----------- S t a t e -------------
1347 // -- rbx : argument converted to string
1348 // -- rdi : constructor function
1349 // -- rsp[0] : return address
1350 // -----------------------------------
1352 // Allocate a JSValue and put the tagged pointer into rax.
1354 __ Allocate(JSValue::kSize,
1356 rcx, // New allocation top (we ignore it).
1362 __ LoadGlobalFunctionInitialMap(rdi, rcx);
1363 if (FLAG_debug_code) {
1364 __ cmpb(FieldOperand(rcx, Map::kInstanceSizeOffset),
1365 Immediate(JSValue::kSize >> kPointerSizeLog2));
1366 __ Assert(equal, kUnexpectedStringWrapperInstanceSize);
1367 __ cmpb(FieldOperand(rcx, Map::kUnusedPropertyFieldsOffset), Immediate(0));
1368 __ Assert(equal, kUnexpectedUnusedPropertiesOfStringWrapper);
1370 __ movp(FieldOperand(rax, HeapObject::kMapOffset), rcx);
1372 // Set properties and elements.
1373 __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
1374 __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), rcx);
1375 __ movp(FieldOperand(rax, JSObject::kElementsOffset), rcx);
1378 __ movp(FieldOperand(rax, JSValue::kValueOffset), rbx);
1380 // Ensure the object is fully initialized.
1381 STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
1383 // We're done. Return.
1386 // The argument was not found in the number to string cache. Check
1387 // if it's a string already before calling the conversion builtin.
1388 Label convert_argument;
1389 __ bind(¬_cached);
1390 STATIC_ASSERT(kSmiTag == 0);
1391 __ JumpIfSmi(rax, &convert_argument);
1392 Condition is_string = masm->IsObjectStringType(rax, rbx, rcx);
1393 __ j(NegateCondition(is_string), &convert_argument);
1395 __ IncrementCounter(counters->string_ctor_string_value(), 1);
1396 __ jmp(&argument_is_string);
1398 // Invoke the conversion builtin and put the result into rbx.
1399 __ bind(&convert_argument);
1400 __ IncrementCounter(counters->string_ctor_conversions(), 1);
1402 FrameScope scope(masm, StackFrame::INTERNAL);
1403 __ Push(rdi); // Preserve the function.
1404 ToStringStub stub(masm->isolate());
1409 __ jmp(&argument_is_string);
1411 // Load the empty string into rbx, remove the receiver from the
1412 // stack, and jump back to the case where the argument is a string.
1413 __ bind(&no_arguments);
1414 __ LoadRoot(rbx, Heap::kempty_stringRootIndex);
1415 __ PopReturnAddressTo(rcx);
1416 __ leap(rsp, Operand(rsp, kPointerSize));
1417 __ PushReturnAddressFrom(rcx);
1418 __ jmp(&argument_is_string);
1420 // At this point the argument is already a string. Call runtime to
1421 // create a string wrapper.
1422 __ bind(&gc_required);
1423 __ IncrementCounter(counters->string_ctor_gc_required(), 1);
1425 FrameScope scope(masm, StackFrame::INTERNAL);
1427 __ CallRuntime(Runtime::kNewStringWrapper, 1);
1433 static void ArgumentsAdaptorStackCheck(MacroAssembler* masm,
1434 Label* stack_overflow) {
1435 // ----------- S t a t e -------------
1436 // -- rax : actual number of arguments
1437 // -- rbx : expected number of arguments
1438 // -- rdi: function (passed through to callee)
1439 // -----------------------------------
1440 // Check the stack for overflow. We are not trying to catch
1441 // interruptions (e.g. debug break and preemption) here, so the "real stack
1442 // limit" is checked.
__ LoadRoot(rdx, Heap::kRealStackLimitRootIndex);
__ movp(rcx, rsp);
// Make rcx the space we have left. The stack might already be overflowed
// here which will cause rcx to become negative.
__ subp(rcx, rdx);
// Make rdx the space we need for the array when it is unrolled onto the
// stack.
__ movp(rdx, rbx);
__ shlp(rdx, Immediate(kPointerSizeLog2));
// Check if the arguments will overflow the stack.
__ cmpp(rcx, rdx);
__ j(less_equal, stack_overflow);  // Signed comparison.
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ pushq(rbp);
  __ movp(rbp, rsp);

  // Store the arguments adaptor context sentinel.
  __ Push(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));

  // Push the function on the stack.
  __ Push(rdi);

  // Preserve the number of arguments on the stack. Must preserve rax,
  // rbx and rcx because these registers are used when copying the
  // arguments and the receiver.
  __ Integer32ToSmi(r8, rax);
  __ Push(r8);
}
static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // Retrieve the number of arguments from the stack. Number is a Smi.
  __ movp(rbx, Operand(rbp, ArgumentsAdaptorFrameConstants::kLengthOffset));
  // Leave the frame.
  __ movp(rsp, rbp);
  __ popq(rbp);
  // Remove caller arguments from the stack.
1486 __ PopReturnAddressTo(rcx);
1487 SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
1488 __ leap(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
1489 __ PushReturnAddressFrom(rcx);
1493 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
1494 // ----------- S t a t e -------------
1495 // -- rax : actual number of arguments
1496 // -- rbx : expected number of arguments
1497 // -- rdi: function (passed through to callee)
1498 // -----------------------------------
1500 Label invoke, dont_adapt_arguments;
1501 Counters* counters = masm->isolate()->counters();
1502 __ IncrementCounter(counters->arguments_adaptors(), 1);
1504 Label stack_overflow;
1505 ArgumentsAdaptorStackCheck(masm, &stack_overflow);
1507 Label enough, too_few;
1508 __ movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
__ cmpp(rax, rbx);
__ j(less, &too_few);
1511 __ cmpp(rbx, Immediate(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
1512 __ j(equal, &dont_adapt_arguments);
1514 { // Enough parameters: Actual >= expected.
__ bind(&enough);
EnterArgumentsAdaptorFrame(masm);
1518 // Copy receiver and all expected arguments.
1519 const int offset = StandardFrameConstants::kCallerSPOffset;
1520 __ leap(rax, Operand(rbp, rax, times_pointer_size, offset));
1521 __ Set(r8, -1); // account for receiver
1526 __ Push(Operand(rax, 0));
1527 __ subp(rax, Immediate(kPointerSize));
{  // Too few parameters: Actual < expected.
  __ bind(&too_few);
1536 // If the function is strong we need to throw an error.
1537 Label no_strong_error;
1538 __ movp(kScratchRegister,
1539 FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
1540 __ testb(FieldOperand(kScratchRegister,
1541 SharedFunctionInfo::kStrongModeByteOffset),
1542 Immediate(1 << SharedFunctionInfo::kStrongModeBitWithinByte));
1543 __ j(equal, &no_strong_error, Label::kNear);
1545 // What we really care about is the required number of arguments.
if (kPointerSize == kInt32Size) {
  __ movp(kScratchRegister,
          FieldOperand(kScratchRegister, SharedFunctionInfo::kLengthOffset));
  __ SmiToInteger32(kScratchRegister, kScratchRegister);
} else {
  // See comment near kLengthOffset in src/objects.h
  __ movsxlq(kScratchRegister,
             FieldOperand(kScratchRegister, SharedFunctionInfo::kLengthOffset));
  __ shrq(kScratchRegister, Immediate(1));
}
1560 __ cmpp(rax, kScratchRegister);
1561 __ j(greater_equal, &no_strong_error, Label::kNear);
1564 FrameScope frame(masm, StackFrame::MANUAL);
1565 EnterArgumentsAdaptorFrame(masm);
1566 __ CallRuntime(Runtime::kThrowStrongModeTooFewArguments, 0);
1569 __ bind(&no_strong_error);
1570 EnterArgumentsAdaptorFrame(masm);
1572 // Copy receiver and all actual arguments.
1573 const int offset = StandardFrameConstants::kCallerSPOffset;
1574 __ leap(rdi, Operand(rbp, rax, times_pointer_size, offset));
1575 __ Set(r8, -1); // account for receiver
1580 __ Push(Operand(rdi, 0));
1581 __ subp(rdi, Immediate(kPointerSize));
1585 // Fill remaining expected arguments with undefined values.
1587 __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
1590 __ Push(kScratchRegister);
1594 // Restore function pointer.
1595 __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
// Call the entry point.
__ bind(&invoke);
__ movp(rax, rbx);
// rax : expected number of arguments
// rdi: function (passed through to callee)
__ call(rdx);
1605 // Store offset of return address for deoptimizer.
1606 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
1608 // Leave frame and return.
LeaveArgumentsAdaptorFrame(masm);
__ ret(0);
1612 // -------------------------------------------
// Don't adapt arguments.
1614 // -------------------------------------------
__ bind(&dont_adapt_arguments);
__ jmp(rdx);
1618 __ bind(&stack_overflow);
1620 FrameScope frame(masm, StackFrame::MANUAL);
1621 EnterArgumentsAdaptorFrame(masm);
__ InvokeBuiltin(Context::STACK_OVERFLOW_BUILTIN_INDEX, CALL_FUNCTION);
__ int3();
1629 void Builtins::Generate_CallFunction(MacroAssembler* masm) {
1630 // ----------- S t a t e -------------
1631 // -- rax : the number of arguments (not including the receiver)
1632 // -- rdi : the function to call (checked to be a JSFunction)
1633 // -----------------------------------
1635 Label convert, convert_global_proxy, convert_to_object, done_convert;
1636 StackArgumentsAccessor args(rsp, rax);
1637 __ AssertFunction(rdi);
1638 // TODO(bmeurer): Throw a TypeError if function's [[FunctionKind]] internal
1639 // slot is "classConstructor".
1640 // Enter the context of the function; ToObject has to run in the function
1641 // context, and we also need to take the global proxy from the function
1642 // context in case of conversion.
1643 // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
1644 STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset ==
1645 SharedFunctionInfo::kStrictModeByteOffset);
1646 __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
1647 __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
1648 // We need to convert the receiver for non-native sloppy mode functions.
1649 __ testb(FieldOperand(rdx, SharedFunctionInfo::kNativeByteOffset),
1650 Immediate((1 << SharedFunctionInfo::kNativeBitWithinByte) |
1651 (1 << SharedFunctionInfo::kStrictModeBitWithinByte)));
1652 __ j(not_zero, &done_convert);
1654 __ movp(rcx, args.GetReceiverOperand());
1656 // ----------- S t a t e -------------
1657 // -- rax : the number of arguments (not including the receiver)
1658 // -- rcx : the receiver
1659 // -- rdx : the shared function info.
1660 // -- rdi : the function to call (checked to be a JSFunction)
1661 // -- rsi : the function context.
1662 // -----------------------------------
1664 Label convert_receiver;
1665 __ JumpIfSmi(rcx, &convert_to_object, Label::kNear);
1666 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
1667 __ CmpObjectType(rcx, FIRST_JS_RECEIVER_TYPE, rbx);
1668 __ j(above_equal, &done_convert);
__ JumpIfRoot(rcx, Heap::kUndefinedValueRootIndex, &convert_global_proxy,
              Label::kNear);
__ JumpIfNotRoot(rcx, Heap::kNullValueRootIndex, &convert_to_object,
                 Label::kNear);
1673 __ bind(&convert_global_proxy);
1675 // Patch receiver to global proxy.
1676 __ LoadGlobalProxy(rcx);
1678 __ jmp(&convert_receiver);
1679 __ bind(&convert_to_object);
1681 // Convert receiver using ToObject.
1682 // TODO(bmeurer): Inline the allocation here to avoid building the frame
1683 // in the fast case? (fall back to AllocateInNewSpace?)
1684 FrameScope scope(masm, StackFrame::INTERNAL);
1685 __ Integer32ToSmi(rax, rax);
1689 ToObjectStub stub(masm->isolate());
1694 __ SmiToInteger32(rax, rax);
1696 __ movp(rdx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
1697 __ bind(&convert_receiver);
1698 __ movp(args.GetReceiverOperand(), rcx);
1700 __ bind(&done_convert);
1702 // ----------- S t a t e -------------
1703 // -- rax : the number of arguments (not including the receiver)
1704 // -- rdx : the shared function info.
1705 // -- rdi : the function to call (checked to be a JSFunction)
1706 // -- rsi : the function context.
1707 // -----------------------------------
1709 __ LoadSharedFunctionInfoSpecialField(
1710 rbx, rdx, SharedFunctionInfo::kFormalParameterCountOffset);
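// rbx holds the formal parameter count; InvokeCode dispatches through the
// arguments adaptor when it differs from the actual count in rax.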
1711 __ movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
1712 ParameterCount actual(rax);
1713 ParameterCount expected(rbx);
1714 __ InvokeCode(rdx, expected, actual, JUMP_FUNCTION, NullCallWrapper());
1719 void Builtins::Generate_Call(MacroAssembler* masm) {
1720 // ----------- S t a t e -------------
1721 // -- rax : the number of arguments (not including the receiver)
1722 // -- rdi : the target to call (can be any Object).
1723 // -----------------------------------
1725 Label non_smi, non_function;
1726 __ JumpIfSmi(rdi, &non_function);
__ bind(&non_smi);
__ CmpObjectType(rdi, JS_FUNCTION_TYPE, rdx);
1729 __ j(equal, masm->isolate()->builtins()->CallFunction(),
1730 RelocInfo::CODE_TARGET);
1731 __ CmpInstanceType(rdx, JS_FUNCTION_PROXY_TYPE);
1732 __ j(not_equal, &non_function);
1734 // 1. Call to function proxy.
1735 // TODO(neis): This doesn't match the ES6 spec for [[Call]] on proxies.
1736 __ movp(rdi, FieldOperand(rdi, JSFunctionProxy::kCallTrapOffset));
__ AssertNotSmi(rdi);
__ jmp(&non_smi);
1740 // 2. Call to something else, which might have a [[Call]] internal method (if
1741 // not we raise an exception).
1742 __ bind(&non_function);
1743 // TODO(bmeurer): I wonder why we prefer to have slow API calls? This could
1744 // be awesome instead; i.e. a trivial improvement would be to call into the
1745 // runtime and just deal with the API function there instead of returning a
1746 // delegate from a runtime call that just jumps back to the runtime once
1747 // called. Or, bonus points, call directly into the C API function here, as
1748 // we do in some Crankshaft fast cases.
1749 StackArgumentsAccessor args(rsp, rax);
1750 // Overwrite the original receiver with the (original) target.
1751 __ movp(args.GetReceiverOperand(), rdi);
1753 // Determine the delegate for the target (if any).
1754 FrameScope scope(masm, StackFrame::INTERNAL);
1755 __ Integer32ToSmi(rax, rax);
1758 __ CallRuntime(Runtime::kGetFunctionDelegate, 1);
1761 __ SmiToInteger32(rax, rax);
1763 // The delegate is always a regular function.
1764 __ AssertFunction(rdi);
1765 __ Jump(masm->isolate()->builtins()->CallFunction(), RelocInfo::CODE_TARGET);
1769 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
1770 // Lookup the function in the JavaScript frame.
1771 __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1773 FrameScope scope(masm, StackFrame::INTERNAL);
// Pass function as argument.
__ Push(rax);
__ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
Label skip;
// If the code object is null, just return to the unoptimized code.
__ cmpp(rax, Immediate(0));
__ j(not_equal, &skip, Label::kNear);
__ ret(0);
__ bind(&skip);
1787 // Load deoptimization data from the code object.
1788 __ movp(rbx, Operand(rax, Code::kDeoptimizationDataOffset - kHeapObjectTag));
1790 // Load the OSR entrypoint offset from the deoptimization data.
1791 __ SmiToInteger32(rbx, Operand(rbx, FixedArray::OffsetOfElementAt(
1792 DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag));
1794 // Compute the target address = code_obj + header_size + osr_offset
1795 __ leap(rax, Operand(rax, rbx, times_1, Code::kHeaderSize - kHeapObjectTag));
1797 // Overwrite the return address on the stack.
1798 __ movq(StackOperandForReturnAddress(0), rax);
1800 // And "return" to the OSR entry point of the function.
1805 void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
1806 // We check the stack limit as indicator that recompilation might be done.
Label ok;
__ CompareRoot(rsp, Heap::kStackLimitRootIndex);
1809 __ j(above_equal, &ok);
1811 FrameScope scope(masm, StackFrame::INTERNAL);
1812 __ CallRuntime(Runtime::kStackGuard, 0);
1814 __ jmp(masm->isolate()->builtins()->OnStackReplacement(),
1815 RelocInfo::CODE_TARGET);
}  // namespace internal
}  // namespace v8
1827 #endif // V8_TARGET_ARCH_X64